mirror of https://github.com/knightcrawler-stremio/knightcrawler.git (synced 2024-12-20 03:29:51 +00:00)
add yts full scan scraper
@@ -1,6 +1,7 @@
 const thepiratebayScraper = require('../scrapers/thepiratebay/thepiratebay_scraper');
 const thepiratebayFakeRemoval = require('../scrapers/thepiratebay/thepiratebay_fakes_removal');
 const ytsScraper = require('../scrapers/yts/yts_scraper');
+const ytsFullScraper = require('../scrapers/yts/yts_full_scraper');
 const eztvScraper = require('../scrapers/eztv/eztv_scraper');
 const leetxScraper = require('../scrapers/1337x/1337x_scraper');
 const rarbgScraper = require('../scrapers/rarbg/rarbg_scraper');
@@ -19,6 +20,7 @@ const torrent9Scraper = require('../scrapers/torrent9/torrent9_scraper');
 
 module.exports = [
   { scraper: ytsScraper, name: ytsScraper.NAME, cron: '0 0 */4 ? * *' },
+  { scraper: ytsFullScraper, name: ytsFullScraper.NAME, cron: '0 0 0 * * 0' },
   { scraper: eztvScraper, name: eztvScraper.NAME, cron: '0 0 */4 ? * *' },
   { scraper: nyaaSiScraper, name: nyaaSiScraper.NAME, cron: '0 0 */4 ? * *' },
   { scraper: nyaaPantsuScraper, name: nyaaPantsuScraper.NAME, cron: '0 0 */4 ? * *' },
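For reference, the two yts entries above use six-field (seconds-first) cron expressions; assuming that interpretation, they translate roughly as follows:

// '0 0 */4 ? * *' -> at second 0, minute 0, every 4th hour: the existing incremental scrape
// '0 0 0 * * 0'   -> at 00:00:00 on one fixed day of the week: the new weekly full scan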
@@ -42,6 +42,12 @@ function browse(config = {}, retries = 2) {
       .catch(error => browse(config, retries - 1));
 }
 
+function maxPage() {
+  return Promises.first(defaultProxies
+      .map(proxyUrl => singleRequest(`${proxyUrl}/api/v2/list_movies.json?limit=${limit}`)))
+      .then(results => Math.round((results?.data?.movie_count || 0) / limit))
+}
+
 function singleRequest(requestUrl, config = {}) {
   const timeout = config.timeout || defaultTimeout;
   const options = { headers: { 'User-Agent': getRandomUserAgent() }, timeout: timeout };
@@ -89,4 +95,4 @@ function formatType(type) {
   return type.toUpperCase();
 }
 
-module.exports = { torrent, search, browse };
+module.exports = { torrent, search, browse, maxPage };
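The yts api module now also exports maxPage(), which resolves to roughly movie_count / limit from the YTS list_movies endpoint, i.e. the number of pages a full browse() scan has to walk. A minimal usage sketch (the require path is an assumption):

// Sketch: log how many pages a full scan would currently cover.
const yts = require('./scrapers/yts/yts_api');

yts.maxPage()
    .then(pages => console.log(`YTS full scan would cover ~${pages} pages`));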
scraper/scrapers/yts/yts_full_scraper.js (new file, 15 lines)
@@ -0,0 +1,15 @@
+const moment = require("moment");
+const yts = require('./yts_api');
+const scraper = require('./yts_scraper')
+
+
+async function scrape() {
+  const scrapeStart = moment();
+  console.log(`[${scrapeStart}] starting ${scraper.NAME} full scrape...`);
+
+  return yts.maxPage()
+      .then(maxPage => scraper.scrape(maxPage))
+      .then(() => console.log(`[${moment()}] finished ${scraper.NAME} full scrape`));
+}
+
+module.exports = { scrape, NAME: scraper.NAME };
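The new file only resolves the page count and hands it to the regular yts scraper. A minimal sketch of invoking it directly, outside the weekly cron entry registered above (the require path is an assumption):

// Sketch: trigger the full scan once, the same call the scheduler makes on its weekly run.
const ytsFullScraper = require('./scrapers/yts/yts_full_scraper');

ytsFullScraper.scrape()
    .then(() => console.log(`${ytsFullScraper.NAME} full scrape completed`))
    .catch(error => console.error(`${ytsFullScraper.NAME} full scrape failed`, error));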
@@ -10,12 +10,12 @@ const UNTIL_PAGE = 10;
 
 const limiter = new Bottleneck({ maxConcurrent: 10 });
 
-async function scrape() {
+async function scrape(maxPage) {
   const scrapeStart = moment();
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  return scrapeLatestTorrents()
+  return scrapeLatestTorrentsForCategory(maxPage)
       .then(() => {
         lastScrape.lastScraped = scrapeStart;
         return lastScrape.save();
@@ -27,11 +27,7 @@ async function updateSeeders(torrent) {
   return limiter.schedule(() => yts.torrent(torrent.torrentId));
 }
 
-async function scrapeLatestTorrents() {
-  return scrapeLatestTorrentsForCategory();
-}
-
-async function scrapeLatestTorrentsForCategory(page = 1) {
+async function scrapeLatestTorrentsForCategory(maxPage = UNTIL_PAGE, page = 1) {
   console.log(`Scrapping ${NAME} page ${page}`);
   return yts.browse(({ page }))
       .catch(error => {
@@ -39,8 +35,8 @@ async function scrapeLatestTorrentsForCategory(page = 1) {
         return Promise.resolve([]);
       })
       .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
-      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
-          ? scrapeLatestTorrentsForCategory(page + 1)
+      .then(resolved => resolved.length > 0 && page < maxPage
+          ? scrapeLatestTorrentsForCategory(maxPage, page + 1)
           : Promise.resolve());
 }
 