chunk PM cached hashes query

This commit is contained in:
TheBeastLT
2022-01-05 17:15:49 +01:00
parent 6b1c0a90ad
commit e23fa9bb55

View File

@@ -4,34 +4,40 @@ const { Type } = require('../lib/types');
const { isVideo } = require('../lib/extension'); const { isVideo } = require('../lib/extension');
const StaticResponse = require('./static'); const StaticResponse = require('./static');
const { getMagnetLink } = require('../lib/magnetHelper'); const { getMagnetLink } = require('../lib/magnetHelper');
const { BadTokenError } = require('./mochHelper'); const { BadTokenError, chunkArray } = require('./mochHelper');
const KEY = 'premiumize'; const KEY = 'premiumize';
// Checks Premiumize cache availability for the given streams.
// Splits the streams into chunks of 100 hashes per API request (the cache
// endpoint limit) and merges the per-chunk results into a single map keyed
// by info hash.
async function getCachedStreams(streams, apiKey) {
  const options = await getDefaultOptions();
  const PM = new PremiumizeClient(apiKey, options);
  const chunks = chunkArray(streams, 100);
  const chunkResults = await Promise.all(
      chunks.map(chunk => _getCachedStreams(PM, apiKey, chunk)));
  // Flatten the array of per-chunk maps into one combined map.
  return chunkResults.reduce((all, result) => Object.assign(all, result), {});
}
// Queries the Premiumize cache for one chunk of streams and builds the moch
// stream map: { infoHash: { url, cached } }. On an auth failure the promise
// is rejected with BadTokenError; any other cache-check failure is logged and
// treated as "availability unknown" (cached resolves to undefined).
async function _getCachedStreams(PM, apiKey, streams) {
  const hashes = streams.map(stream => stream.infoHash);
  let available;
  try {
    available = await PM.cache.check(hashes);
  } catch (error) {
    // NOTE: the trailing space in this message matches the exact API response.
    if (error && error.message === 'customer_id and pin parameter missing or not logged in ') {
      return Promise.reject(BadTokenError);
    }
    console.warn('Failed Premiumize cached torrent availability request:', error);
    available = undefined;
  }
  const mochStreams = {};
  streams.forEach((stream, index) => {
    // Strip the trailing "👤 …" metadata line, then split title into parts;
    // the last part is the contained file name.
    const titleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
    const fileName = titleParts[titleParts.length - 1];
    // Two parts means torrent-name + file-name, so the file index is known.
    const fileIndex = titleParts.length === 2 ? stream.fileIdx : null;
    mochStreams[stream.infoHash] = {
      url: `${apiKey}/${stream.infoHash}/${encodeURIComponent(fileName)}/${fileIndex}`,
      cached: available && available.response[index]
    };
  });
  return mochStreams;
}
async function getCatalog(apiKey, offset = 0) { async function getCatalog(apiKey, offset = 0) {
@@ -74,8 +80,8 @@ async function getFolderContents(PM, itemId, ip, folderPrefix = '') {
return PM.folder.list(itemId, null, ip) return PM.folder.list(itemId, null, ip)
.then(response => response.content) .then(response => response.content)
.then(contents => Promise.all(contents .then(contents => Promise.all(contents
.filter(content => content.type === 'folder') .filter(content => content.type === 'folder')
.map(content => getFolderContents(PM, content.id, ip, [folderPrefix, content.name].join('/')))) .map(content => getFolderContents(PM, content.id, ip, [folderPrefix, content.name].join('/'))))
.then(otherContents => otherContents.reduce((a, b) => a.concat(b), [])) .then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
.then(otherContents => contents .then(otherContents => contents
.filter(content => content.type === 'file' && isVideo(content.name)) .filter(content => content.type === 'file' && isVideo(content.name))