[addon] cache rd availability results

TheBeastLT committed 2021-06-16 12:59:31 +02:00
parent e5ba5e6b18
commit f3d326a55a
3 changed files with 72 additions and 14 deletions

View File

@@ -3,6 +3,7 @@ const mangodbStore = require('cache-manager-mongodb');
 const GLOBAL_KEY_PREFIX = 'torrentio-addon';
 const STREAM_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|stream`;
+const AVAILABILITY_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|availability`;
 const RESOLVED_URL_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|resolved`;
 const STREAM_TTL = process.env.STREAM_TTL || 4 * 60 * 60; // 4 hours
@@ -43,7 +44,8 @@ function initiateRemoteCache() {
 function initiateMemoryCache() {
   return cacheManager.caching({
     store: 'memory',
-    ttl: RESOLVED_URL_TTL
+    ttl: RESOLVED_URL_TTL,
+    max: Infinity // infinite LRU cache size
   });
 }
@@ -64,5 +66,36 @@ function cacheWrapResolvedUrl(id, method) {
   return cacheWrap(memoryCache, `${RESOLVED_URL_KEY_PREFIX}:${id}`, method, { ttl: RESOLVED_URL_TTL });
 }

-module.exports = { cacheWrapStream, cacheWrapResolvedUrl };
+function cacheAvailabilityResults(results) {
+  const flatResults = Object.keys(results)
+      .map(infoHash => [`${AVAILABILITY_KEY_PREFIX}:${infoHash}`, results[infoHash]])
+      .reduce((a, b) => a.concat(b), []);
+  memoryCache.mset(...flatResults, { ttl: STREAM_TTL }, (error) => {
+    if (error) {
+      console.log('Failed storing availability cache', error);
+    }
+  });
+  return results;
+}
+
+function getCachedAvailabilityResults(infoHashes) {
+  const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`);
+  return new Promise(resolve => {
+    memoryCache.mget(...keys, (error, result) => {
+      if (error) {
+        console.log('Failed to retrieve availability cache', error);
+        return resolve({});
+      }
+      const availabilityResults = {};
+      infoHashes.forEach((infoHash, index) => {
+        if (result[index]) {
+          availabilityResults[infoHash] = result[index];
+        }
+      });
+      resolve(availabilityResults);
+    });
+  });
+}
+
+module.exports = { cacheWrapStream, cacheWrapResolvedUrl, cacheAvailabilityResults, getCachedAvailabilityResults };
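A minimal sketch of the round trip these two helpers perform against the memory cache configured above; the hashes and file-id groups are invented for illustration, and everything other than cache-manager's mset/mget usage (which mirrors the code above) is a placeholder rather than part of this commit.

// Sketch: flatten an availability map into [key1, value1, key2, value2, ...]
// for mset, then rebuild it from mget in request order (misses come back undefined).
const cacheManager = require('cache-manager');

const cache = cacheManager.caching({ store: 'memory', ttl: 4 * 60 * 60, max: Infinity });
const PREFIX = 'torrentio-addon|availability';
const results = { 'abc123': [['1', '2']], 'def456': [['1']] }; // invented infoHash -> cached file id groups

const flat = Object.keys(results)
    .map(infoHash => [`${PREFIX}:${infoHash}`, results[infoHash]])
    .reduce((a, b) => a.concat(b), []);

cache.mset(...flat, { ttl: 4 * 60 * 60 }, () => {
  const requested = ['abc123', 'def456', 'missing'];
  cache.mget(...requested.map(infoHash => `${PREFIX}:${infoHash}`), (error, values) => {
    const found = {};
    requested.forEach((infoHash, index) => {
      if (values[index]) {
        found[infoHash] = values[index];
      }
    });
    console.log(found); // { abc123: [...], def456: [...] } -- 'missing' is simply absent
  });
});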

View File

@@ -6,4 +6,4 @@ function chunkArray(arr, size) {
       : [arr];
 }

 module.exports = { chunkArray, BadTokenError }

View File

@@ -2,6 +2,7 @@ const RealDebridClient = require('real-debrid-api');
 const { Type } = require('../lib/types');
 const { isVideo, isArchive } = require('../lib/extension');
 const { delay } = require('../lib/promises');
+const { cacheAvailabilityResults, getCachedAvailabilityResults } = require('../lib/cache');
 const StaticResponse = require('./static');
 const { getMagnetLink } = require('../lib/magnetHelper');
 const { chunkArray, BadTokenError } = require('./mochHelper');
@@ -29,17 +30,24 @@ async function getCachedStreams(streams, apiKey) {
 }

 async function _getInstantAvailable(hashes, apiKey, retries = 3) {
-  const options = await getDefaultOptions();
-  const RD = new RealDebridClient(apiKey, options);
-  const hashBatches = chunkArray(hashes, 150)
+  const cachedResults = await getCachedAvailabilityResults(hashes);
+  const missingHashes = hashes.filter(infoHash => !cachedResults[infoHash]);
+  if (!missingHashes.length) {
+    return cachedResults;
+  }
+  const RD = new RealDebridClient(apiKey, getDefaultOptions());
+  const hashBatches = chunkArray(missingHashes, 150)
   return Promise.all(hashBatches.map(batch => RD.torrents.instantAvailability(batch)
       .then(response => {
         if (typeof response !== 'object') {
           return Promise.reject(new Error('RD returned non JSON response: ' + response));
         }
-        return response;
+        return processAvailabilityResults(response);
       })))
       .then(results => results.reduce((all, result) => Object.assign(all, result), {}))
+      .then(results => cacheAvailabilityResults(results))
+      .then(results => Object.assign(cachedResults, results))
       .catch(error => {
         if (error && error.code === 8) {
           return Promise.reject(BadTokenError);
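Read apart from the diff markers, availability lookup is now cache-first. A hedged sketch of that flow, where fetchBatches is a hypothetical stand-in for the chunked RD.torrents.instantAvailability calls above, not a function in this commit:

// Sketch of the cache-first pattern: answer from the memory cache, hit the
// Real-Debrid API only for hashes that missed, then store and merge the rest.
const { cacheAvailabilityResults, getCachedAvailabilityResults } = require('../lib/cache');

async function getAvailability(hashes, fetchBatches) {
  const cachedResults = await getCachedAvailabilityResults(hashes);
  const missingHashes = hashes.filter(infoHash => !cachedResults[infoHash]);
  if (!missingHashes.length) {
    return cachedResults; // every hash answered from cache, no API call at all
  }
  const freshResults = await fetchBatches(missingHashes); // hypothetical: wraps the batched RD calls
  cacheAvailabilityResults(freshResults); // reused on later requests within STREAM_TTL
  return Object.assign(cachedResults, freshResults);
}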
@@ -52,18 +60,35 @@ async function _getInstantAvailable(hashes, apiKey, retries = 3) {
   });
 }

-function _getCachedFileIds(fileIndex, hosterResults) {
+function processAvailabilityResults(availabilityResults) {
+  const processedResults = {};
+  Object.entries(availabilityResults)
+      .forEach(([infoHash, hosterResults]) => processedResults[infoHash] = getCachedIds(hosterResults));
+  return processedResults;
+}
+
+function getCachedIds(hosterResults) {
   if (!hosterResults || Array.isArray(hosterResults)) {
     return [];
   }
   // if not all cached files are videos, then the torrent will be zipped to a rar
-  const cachedTorrents = Object.values(hosterResults)
+  return Object.values(hosterResults)
       .reduce((a, b) => a.concat(b), [])
-      .filter(cached => !Number.isInteger(fileIndex) && Object.keys(cached).length || cached[fileIndex + 1])
-      .filter(cached => Object.values(cached).every(file => isVideo(file.filename)))
+      .filter(cached => Object.keys(cached).length && Object.values(cached).every(file => isVideo(file.filename)))
       .map(cached => Object.keys(cached))
-      .sort((a, b) => b.length - a.length);
-  return cachedTorrents.length && cachedTorrents[0] || [];
+      .sort((a, b) => b.length - a.length)
+      .filter((cached, index, array) => index === 0 || cached.some(id => !array[0].includes(id)));
+}
+
+function _getCachedFileIds(fileIndex, cachedResults) {
+  if (!cachedResults || !Array.isArray(cachedResults)) {
+    return [];
+  }
+  const cachedIds = Number.isInteger(fileIndex)
+      ? cachedResults.find(ids => ids.includes(`${fileIndex + 1}`))
+      : cachedResults[0];
+  return cachedIds || [];
 }

 async function getCatalog(apiKey, offset, ip) {
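For orientation, a small worked example of the reworked selection helpers, assuming getCachedIds and _getCachedFileIds above are in scope (they are not exported); the hoster payload is invented, and file ids in the RD response appear to be 1-based, hence the fileIndex + 1 lookup.

// Invented RD-style payload for one info hash: each hoster entry maps a
// 1-based file id to file metadata. Groups containing any non-video file are
// dropped, since RD would serve such a torrent as a rar archive.
const hosterResults = {
  rd: [
    { '1': { filename: 'movie.mkv' }, '2': { filename: 'extras.mkv' } },
    { '1': { filename: 'movie.mkv' }, '3': { filename: 'info.nfo' } }
  ]
};

getCachedIds(hosterResults);
// -> [['1', '2']]   the second group is rejected because 'info.nfo' is not a video

_getCachedFileIds(0, [['1', '2'], ['3']]);         // -> ['1', '2']  file index 0 maps to RD id '1'
_getCachedFileIds(undefined, [['1', '2'], ['3']]); // -> ['1', '2']  no index: take the first (largest) group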
@@ -302,4 +327,4 @@ async function getDefaultOptions(ip) {
   return { ip, timeout: 30000 };
 }

 module.exports = { getCachedStreams, resolve, getCatalog, getItemMeta };