[addon] improve rd catalog request and reduce proxy cache time
This commit is contained in:
@@ -10,7 +10,7 @@ const USER_AGENT_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|agent`;
|
|||||||
const STREAM_TTL = process.env.STREAM_TTL || 4 * 60 * 60; // 4 hours
|
const STREAM_TTL = process.env.STREAM_TTL || 4 * 60 * 60; // 4 hours
|
||||||
const STREAM_EMPTY_TTL = process.env.STREAM_EMPTY_TTL || 30 * 60; // 30 minutes
|
const STREAM_EMPTY_TTL = process.env.STREAM_EMPTY_TTL || 30 * 60; // 30 minutes
|
||||||
const RESOLVED_URL_TTL = 2 * 60; // 2 minutes
|
const RESOLVED_URL_TTL = 2 * 60; // 2 minutes
|
||||||
const PROXY_TTL = 60 * 60; // 60 minutes
|
const PROXY_TTL = 30 * 60; // 30 minutes
|
||||||
const USER_AGENT_TTL = 2 * 24 * 60 * 60; // 2 days
|
const USER_AGENT_TTL = 2 * 24 * 60 * 60; // 2 days
|
||||||
// When the streams are empty we want to cache it for less time in case of timeouts or failures
|
// When the streams are empty we want to cache it for less time in case of timeouts or failures
|
||||||
|
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ const { cacheWrapProxy, cacheUserAgent, uncacheProxy } = require('../lib/cache')
|
|||||||
|
|
||||||
const MIN_SIZE = 15728640; // 15 MB
|
const MIN_SIZE = 15728640; // 15 MB
|
||||||
const CATALOG_MAX_PAGE = 5;
|
const CATALOG_MAX_PAGE = 5;
|
||||||
|
const CATALOG_PAGE_SIZE = 100;
|
||||||
const KEY = "realdebrid"
|
const KEY = "realdebrid"
|
||||||
|
|
||||||
async function getCachedStreams(streams, apiKey) {
|
async function getCachedStreams(streams, apiKey) {
|
||||||
@@ -75,11 +76,14 @@ async function getCatalog(apiKey, offset = 0) {
|
|||||||
const options = await getDefaultOptions(apiKey);
|
const options = await getDefaultOptions(apiKey);
|
||||||
const RD = new RealDebridClient(apiKey, options);
|
const RD = new RealDebridClient(apiKey, options);
|
||||||
let page = 1;
|
let page = 1;
|
||||||
return RD.torrents.get(page - 1, page)
|
return RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE)
|
||||||
.then(torrents => torrents && torrents.length === 50 && page < CATALOG_MAX_PAGE
|
.then(torrents => torrents && torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE
|
||||||
? RD.torrents.get(page, page = page + 1).then(nextTorrents => torrents.concat(nextTorrents)).catch(() => [])
|
? RD.torrents.get(page, page = page + 1)
|
||||||
|
.then(nextTorrents => torrents.concat(nextTorrents))
|
||||||
|
.catch(() => torrents)
|
||||||
: torrents)
|
: torrents)
|
||||||
.then(torrents => (torrents || [])
|
.then(torrents => torrents && torrents.length ? torrents : [])
|
||||||
|
.then(torrents => torrents
|
||||||
.filter(torrent => statusReady(torrent.status))
|
.filter(torrent => statusReady(torrent.status))
|
||||||
.map(torrent => ({
|
.map(torrent => ({
|
||||||
id: `${KEY}:${torrent.id}`,
|
id: `${KEY}:${torrent.id}`,
|
||||||
|
|||||||
Reference in New Issue
Block a user