Mirror of https://github.com/knightcrawler-stremio/knightcrawler.git (synced 2024-12-20 03:29:51 +00:00)
[scraper] update seeders from trackers directly
With this change the seeders refresh no longer goes through each provider's scraper: every torrent in the batch gets its current seeder count straight from the trackers via updateCurrentSeeders, and the per-provider scrape with its tracker fallback path is removed.
@@ -1,22 +1,19 @@
 const Bottleneck = require('bottleneck');
-const scrapers = require('./scrapers');
 const repository = require('../lib/repository')
-const { delay, timeout } = require('../lib/promises')
+const { delay } = require('../lib/promises')
 const { updateCurrentSeeders } = require('../lib/torrent')
 const { updateTorrentSeeders } = require('../lib/torrentEntries')
 
 const DELAY_MS = 15 * 1000; // 15 seconds
-const TIMEOUT_MS = 30 * 1000 // 30 seconds
-const FALLBACK_SCRAPER = { updateSeeders: () => [] };
-const limiter = new Bottleneck({ maxConcurrent: 20, minTime: 250 });
 const updateLimiter = new Bottleneck({ maxConcurrent: 5 });
-const forceSeedersLimiter = new Bottleneck({ maxConcurrent: 5 });
 const statistics = {};
 
 function scheduleUpdateSeeders() {
   console.log('Starting seeders update...')
-  return repository.getUpdateSeedersTorrents()
-      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => _updateSeeders(torrent)))))
+  return repository.getUpdateSeedersTorrents(50)
+      .then(torrents => updateCurrentSeeders(torrents))
+      .then(updatedTorrents => Promise.all(
+          updatedTorrents.map(updated => updateLimiter.schedule(() => updateTorrentSeeders(updated)))))
       .then(torrents => updateStatistics(torrents))
       .then(() => console.log('Finished seeders update:', statistics))
       .catch(error => console.warn('Failed seeders update:', error))
@@ -24,31 +21,6 @@ function scheduleUpdateSeeders() {
       .then(() => scheduleUpdateSeeders());
 }
 
-async function _updateSeeders(torrent) {
-  const provider = await scrapers.find(provider => provider.name === torrent.provider);
-  const scraper = provider ? provider.scraper : FALLBACK_SCRAPER;
-
-  const updatedTorrents = await timeout(TIMEOUT_MS, scraper.updateSeeders(torrent, getImdbIdsMethod(torrent)))
-      .then(updated => Array.isArray(updated) ? updated : [updated])
-      .catch(error => {
-        console.warn(`Failed seeders update ${torrent.provider} [${torrent.infoHash}]: `, error)
-        return []
-      });
-
-  if (!updatedTorrents.find(updated => updated.infoHash === torrent.infoHash)) {
-    await forceSeedersLimiter.schedule(() => updateCurrentSeeders(torrent))
-        .then(updated => updatedTorrents.push(updated));
-  }
-
-  return Promise.all(updatedTorrents.map(updated => updateLimiter.schedule(() => updateTorrentSeeders(updated))));
-}
-
-function getImdbIdsMethod(torrent) {
-  return () => repository.getFiles(torrent)
-      .then(files => files.map(file => file.imdbId).filter(id => id))
-      .then(ids => Array.from(new Set(ids)));
-}
-
 function updateStatistics(updatedTorrents) {
   const totalTorrents = updatedTorrents.map(nested => nested.length).reduce((a, b) => a + b, 0);
   const date = new Date().toISOString().replace(/T.*/, '');
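
After this change, scheduleUpdateSeeders pulls a batch of 50 torrents due for a refresh from the repository, hands the whole batch to updateCurrentSeeders from ../lib/torrent (the tracker-facing helper that previously served only as the fallback path), and persists each result through updateTorrentSeeders behind the five-wide Bottleneck limiter. The tracker helper itself is not part of this diff; purely as an illustration of what "update seeders from trackers directly" can look like, the sketch below builds a batch scrape on the bittorrent-tracker package's Client.scrape call. The announce URL, the seeders field name, and the function name are assumptions for the sketch, not code from this repository.

// Hypothetical sketch only -- not the ../lib/torrent implementation referenced by this commit.
// Assumes the bittorrent-tracker npm package and torrents shaped like { infoHash, ... }.
const Tracker = require('bittorrent-tracker');

const ANNOUNCE_URL = 'udp://tracker.opentrackr.org:1337/announce'; // assumed tracker endpoint

// Ask one tracker for swarm stats for a whole batch of torrents in a single scrape request.
function updateCurrentSeedersSketch(torrents) {
  const infoHashes = torrents.map(torrent => torrent.infoHash);
  return new Promise((resolve, reject) => {
    Tracker.scrape({ announce: ANNOUNCE_URL, infoHash: infoHashes }, (error, results) => {
      if (error) {
        return reject(error);
      }
      // For a multi-hash scrape, bittorrent-tracker keys the result object by infoHash;
      // 'complete' is the seeder count the tracker reports for that swarm.
      resolve(torrents.map(torrent => {
        const stats = results[torrent.infoHash] || {};
        return { ...torrent, seeders: stats.complete || 0 };
      }));
    });
  });
}

Read this as the shape of the batch call in the diff (torrents in, updated torrents out, then fed to updateTorrentSeeders one by one), not as the actual knightcrawler tracker code, which may consult several trackers, retry failures, or cap batch sizes differently.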