mirror of
https://github.com/knightcrawler-stremio/knightcrawler.git
synced 2024-12-20 03:29:51 +00:00
[scraper] rework scraper scheduling and add seeder updating
This commit is contained in:
25
scraper/scheduler/scraper.js
Normal file
25
scraper/scheduler/scraper.js
Normal file
@@ -0,0 +1,25 @@
|
||||
const scrapers = require('./scrapers');
|
||||
const { delay, sequence } = require('../lib/promises')
|
||||
|
||||
/**
 * Start a perpetual scrape loop for every configured provider.
 * The per-provider loops run independently and are intentionally
 * not awaited (fire-and-forget scheduling).
 *
 * @returns {undefined}
 */
function scheduleScraping() {
    for (const provider of scrapers) {
        _continuousScrape(provider);
    }
}
|
||||
|
||||
/**
 * Run a single scrape pass over every provider, one after another.
 *
 * @returns {Promise} resolves once the sequential pass completes
 */
function scrapeAll() {
    const tasks = scrapers.map((provider) => () => _singleScrape(provider));
    return sequence(tasks);
}
|
||||
|
||||
/**
 * Scrape one provider forever: scrape, wait the provider's configured
 * interval, repeat. The returned promise never settles.
 *
 * Rewritten from tail-recursive promise chaining
 * (`.then(() => _continuousScrape(provider))`) to an explicit loop: the
 * recursive form makes each invocation's promise wait on the next one,
 * accumulating an ever-growing chain of pending promises in a
 * long-running scheduler. The loop form keeps memory flat while
 * preserving the same scrape → delay → scrape sequencing.
 *
 * @param {Object} provider - entry with `scraper`, `name`, `scrapeInterval`
 * @returns {Promise<never>} never resolves; scrape failures are
 *   logged and swallowed inside _singleScrape
 */
async function _continuousScrape(provider) {
    // eslint-disable-next-line no-constant-condition
    while (true) {
        await _singleScrape(provider); // errors handled inside _singleScrape
        await delay(provider.scrapeInterval);
    }
}
|
||||
|
||||
/**
 * Run one scrape pass for a provider. A failing scrape is logged via
 * console.warn and swallowed, so the returned promise never rejects.
 *
 * @param {Object} provider - entry exposing `name` and `scraper.scrape()`
 * @returns {Promise<*>} the scrape result, or undefined when the scrape failed
 */
async function _singleScrape(provider) {
    try {
        return await provider.scraper.scrape();
    } catch (error) {
        console.warn(`Failed ${provider.name} scraping due: `, error);
        return undefined;
    }
}
|
||||
|
||||
// Public API: scheduleScraping starts the perpetual per-provider loops;
// scrapeAll runs a single sequential pass over every provider.
module.exports = { scheduleScraping, scrapeAll }
|
||||
Reference in New Issue
Block a user