Pull changes from Torrentio
@@ -27,7 +27,7 @@ export async function getCachedStreams(streams, apiKey) {
         const fileName = streamTitleParts[streamTitleParts.length - 1];
         const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
         const encodedFileName = encodeURIComponent(fileName);
-        mochStreams[stream.infoHash] = {
+        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
           url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
           cached: cachedEntry?.instant
         }
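
The recurring change across the cache lookups in this commit is the switch from plain infoHash keys to composite `${infoHash}@${fileIdx}` keys, so different files inside the same torrent get their own cache entries. A minimal standalone sketch of the pattern follows; the helper name and sample data are illustrative, not part of the addon:

// Illustrative sketch only: composite keys of the form `${infoHash}@${fileIdx}`
// keep two files from the same torrent (e.g. two episodes in one season pack)
// from overwriting each other's entry, which a bare infoHash key would do.
function mochKey(stream) {
  return `${stream.infoHash}@${stream.fileIdx}`;
}

const mochStreams = {};
mochStreams[mochKey({ infoHash: 'abc123', fileIdx: 0 })] = { cached: true };
mochStreams[mochKey({ infoHash: 'abc123', fileIdx: 1 })] = { cached: false };

// The lookup side has to build the key the same way:
console.log(mochStreams[mochKey({ infoHash: 'abc123', fileIdx: 1 })]?.cached); // false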
@@ -142,10 +142,10 @@ async function _createTorrent(AD, infoHash) {

 async function _unrestrictLink(AD, torrent, encodedFileName, fileIndex) {
   const targetFileName = decodeURIComponent(encodedFileName);
-  const videos = torrent.links.filter(link => isVideo(link.filename));
+  const videos = torrent.links.filter(link => isVideo(link.filename)).sort((a, b) => b.size - a.size);
   const targetVideo = Number.isInteger(fileIndex)
       ? videos.find(video => sameFilename(targetFileName, video.filename))
-      : videos.sort((a, b) => b.size - a.size)[0];
+      : videos[0];

   if (!targetVideo && torrent.links.every(link => isArchive(link.filename))) {
     console.log(`Only AllDebrid archive is available for [${torrent.hash}] ${encodedFileName}`)
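
The two edits in this hunk move the size sort onto the filtered video list, so the no-index fallback can simply take the first (largest) element. A tiny self-contained sketch of that selection logic, assuming each entry exposes filename and size; the real providers match names through sameFilename rather than strict equality:

// Sketch, not the provider code: pick an explicitly named file when a file
// index is known, otherwise fall back to the largest video in the torrent.
function pickTargetVideo(videos, targetFileName, fileIndex) {
  const sorted = [...videos].sort((a, b) => b.size - a.size); // largest first
  return Number.isInteger(fileIndex)
      ? sorted.find(video => video.filename === targetFileName)
      : sorted[0];
}

// Example: no file index given, so the 1.4 GB file wins over the sample.
console.log(pickTargetVideo(
    [{ filename: 'sample.mkv', size: 50 }, { filename: 'movie.mkv', size: 1_400_000_000 }],
    null,
    null).filename); // movie.mkv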
@@ -25,7 +25,7 @@ export async function getCachedStreams(streams, apiKey) {
   return available && streams
       .reduce((mochStreams, stream) => {
         const cachedEntry = available[stream.infoHash];
-        mochStreams[stream.infoHash] = {
+        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
           url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
           cached: !!cachedEntry
         };
@@ -173,7 +173,7 @@ function processMochResults(streams, config, results) {

 function populateCachedLinks(streams, mochResult, config) {
   return streams.map(stream => {
-    const cachedEntry = stream.infoHash && mochResult.mochStreams[stream.infoHash];
+    const cachedEntry = stream.infoHash && mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
     if (cachedEntry?.cached) {
       return {
         name: `[${mochResult.moch.shortName}+] ${stream.name}`,
@@ -190,7 +190,7 @@ function populateDownloadLinks(streams, mochResults, config) {
   const torrentStreams = streams.filter(stream => stream.infoHash);
   const seededStreams = streams.filter(stream => !stream.title.includes('👤 0'));
   torrentStreams.forEach(stream => mochResults.forEach(mochResult => {
-    const cachedEntry = mochResult.mochStreams[stream.infoHash];
+    const cachedEntry = mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
     const isCached = cachedEntry?.cached;
     if (!isCached && isHealthyStreamForDebrid(seededStreams, stream)) {
       streams.push({
@@ -27,10 +27,9 @@ export async function getCachedStreams(streams, apiKey) {
         const isCached = available.includes(stream.infoHash);
         const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
         const fileName = streamTitleParts[streamTitleParts.length - 1];
-        const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
         const encodedFileName = encodeURIComponent(fileName);
-        mochStreams[stream.infoHash] = {
-          url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
+        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
+          url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${stream.fileIdx}`,
           cached: isCached
         };
         return mochStreams;
@@ -137,7 +136,8 @@ async function _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex) {
 async function _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex) {
   const targetFileName = decodeURIComponent(cachedEntryInfo);
   const files = await _getFileUrls(OC, torrent)
-  const targetFile = files.find(file => sameFilename(targetFileName, file.split('/').pop()))
+  const targetFile = files.find(file => file.includes(`/${torrent.requestId}/${fileIndex}/`))
+      || files.find(file => sameFilename(targetFileName, file.split('/').pop()))
       || files.find(file => isVideo(file))
       || files.pop();

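
The new first matcher prefers a URL that embeds the requested file index, and only then falls back to filename matching, any video, or the last file in the list. A hedged sketch of that ordered-fallback pattern; the URLs, requestId value, and path layout here are invented for illustration:

// Sketch of the ordered fallback: each `||` only runs when every earlier
// matcher returned nothing (Array.prototype.find yields undefined on no match).
const requestId = 'req-1';
const fileIndex = 1;
const targetFileName = 'episode.two.mkv';
const files = [
  'https://example.invalid/dl/req-1/0/episode.one.mkv',
  'https://example.invalid/dl/req-1/1/episode.two.mkv',
];

const targetFile = files.find(file => file.includes(`/${requestId}/${fileIndex}/`))
    || files.find(file => file.endsWith(targetFileName))
    || files[files.length - 1];
console.log(targetFile); // https://example.invalid/dl/req-1/1/episode.two.mkv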
@@ -124,10 +124,10 @@ async function _getCachedLink(PM, infoHash, encodedFileName, fileIndex, ip, isBr
   const cachedTorrent = await PM.transfer.directDownload(magnet.encode({ infoHash }), ip);
   if (cachedTorrent?.content?.length) {
     const targetFileName = decodeURIComponent(encodedFileName);
-    const videos = cachedTorrent.content.filter(file => isVideo(file.path));
+    const videos = cachedTorrent.content.filter(file => isVideo(file.path)).sort((a, b) => b.size - a.size);
     const targetVideo = Number.isInteger(fileIndex)
         ? videos.find(video => sameFilename(video.path, targetFileName))
-        : videos.sort((a, b) => b.size - a.size)[0];
+        : videos[0];
     if (!targetVideo && videos.every(video => isArchive(video.path))) {
       console.log(`Only Premiumize archive is available for [${infoHash}] ${fileIndex}`)
       return StaticResponse.FAILED_RAR;
@@ -17,7 +17,7 @@ export async function getCachedStreams(streams, apiKey) {
         const fileName = streamTitleParts[streamTitleParts.length - 1];
         const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
         const encodedFileName = encodeURIComponent(fileName);
-        mochStreams[stream.infoHash] = {
+        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
           url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
           cached: false
         };
@@ -168,12 +168,12 @@ async function _getTargetFile(Putio, torrent, encodedFileName, fileIndex) {

   while (!targetFile && files.length) {
     const folders = files.filter(file => file.file_type === 'FOLDER');
-    videos = videos.concat(files.filter(file => isVideo(file.name)));
+    videos = videos.concat(files.filter(file => isVideo(file.name))).sort((a, b) => b.size - a.size);
     // when specific file index is defined search by filename
     // when it's not defined find all videos and take the largest one
     targetFile = Number.isInteger(fileIndex)
         ? videos.find(video => sameFilename(targetFileName, video.name))
-        : !folders.length && videos.sort((a, b) => b.size - a.size)[0];
+        : !folders.length && videos[0];
     files = !targetFile
         ? await Promise.all(folders.map(folder => _getFiles(Putio, folder.id)))
             .then(results => results.reduce((a, b) => a.concat(b), []))
@@ -21,7 +21,7 @@ export async function getCachedStreams(streams, apiKey) {
       .reduce((mochStreams, stream) => {
         const cachedEntry = available[stream.infoHash];
         const cachedIds = _getCachedFileIds(stream.fileIdx, cachedEntry);
-        mochStreams[stream.infoHash] = {
+        mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
           url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
           cached: !!cachedIds.length
         };
@@ -395,5 +395,5 @@ function infringingFile(error) {
 }

 async function getDefaultOptions(ip) {
-  return { ip, timeout: 10000 };
+  return { ip, timeout: 15000 };
 }