diff --git a/addon/lib/cache.js b/addon/lib/cache.js
index 9158eca..3448032 100644
--- a/addon/lib/cache.js
+++ b/addon/lib/cache.js
@@ -10,7 +10,7 @@ const USER_AGENT_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|agent`;
 const STREAM_TTL = process.env.STREAM_TTL || 4 * 60 * 60; // 4 hours
 const STREAM_EMPTY_TTL = process.env.STREAM_EMPTY_TTL || 30 * 60; // 30 minutes
 const RESOLVED_URL_TTL = 2 * 60; // 2 minutes
-const PROXY_TTL = 60 * 60; // 60 minutes
+const PROXY_TTL = 30 * 60; // 30 minutes
 const USER_AGENT_TTL = 2 * 24 * 60 * 60; // 2 days
 
 // When the streams are empty we want to cache it for less time in case of timeouts or failures
diff --git a/addon/moch/realdebrid.js b/addon/moch/realdebrid.js
index 02ceab2..0c46227 100644
--- a/addon/moch/realdebrid.js
+++ b/addon/moch/realdebrid.js
@@ -9,6 +9,7 @@ const { cacheWrapProxy, cacheUserAgent, uncacheProxy } = require('../lib/cache')
 
 const MIN_SIZE = 15728640; // 15 MB
 const CATALOG_MAX_PAGE = 5;
+const CATALOG_PAGE_SIZE = 100;
 const KEY = "realdebrid"
 
 async function getCachedStreams(streams, apiKey) {
@@ -75,11 +76,14 @@ async function getCatalog(apiKey, offset = 0) {
   const options = await getDefaultOptions(apiKey);
   const RD = new RealDebridClient(apiKey, options);
   let page = 1;
-  return RD.torrents.get(page - 1, page)
-      .then(torrents => torrents && torrents.length === 50 && page < CATALOG_MAX_PAGE
-          ? RD.torrents.get(page, page = page + 1).then(nextTorrents => torrents.concat(nextTorrents)).catch(() => [])
+  return RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE)
+      .then(torrents => torrents && torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE
+          ? RD.torrents.get(page, page = page + 1)
+              .then(nextTorrents => torrents.concat(nextTorrents))
+              .catch(() => torrents)
           : torrents)
-      .then(torrents => (torrents || [])
+      .then(torrents => torrents && torrents.length ? torrents : [])
+      .then(torrents => torrents
           .filter(torrent => statusReady(torrent.status))
           .map(torrent => ({
             id: `${KEY}:${torrent.id}`,
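
For context, a minimal sketch of the paging behaviour the realdebrid.js hunk introduces, assuming RD.torrents.get(offset, page, pageSize) as called in the diff; fetchCatalogTorrents is a hypothetical helper used only for illustration, not the addon's actual code:

// Illustrative sketch (not the addon's implementation): fetch the first catalog page,
// request a second one only when the first came back full, and keep the already
// fetched torrents if the extra request fails instead of returning an empty catalog.
const CATALOG_MAX_PAGE = 5;
const CATALOG_PAGE_SIZE = 100;

async function fetchCatalogTorrents(RD) {
  let page = 1;
  // First page of up to CATALOG_PAGE_SIZE torrents.
  const torrents = await RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE) || [];
  // Only ask for a second page when the first one was full and the page cap allows it.
  if (torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE) {
    const nextTorrents = await RD.torrents.get(page, page + 1, CATALOG_PAGE_SIZE).catch(() => []);
    return torrents.concat(nextTorrents);
  }
  return torrents;
}

The filter/map over statusReady torrents then runs on whichever list comes back, so a failed second page no longer wipes the whole catalog as the previous catch(() => []) did.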