[scraper] adds eztv scraper

This commit is contained in:
TheBeastLT
2020-04-14 08:29:08 +02:00
parent a58843d75f
commit 7e959609ea
3 changed files with 208 additions and 0 deletions

View File

@@ -6,6 +6,7 @@ const { connect, getUpdateSeedersTorrents } = require('./lib/repository');
const thepiratebayScraper = require('./scrapers/thepiratebay/thepiratebay_scraper');
const horribleSubsScraper = require('./scrapers/horriblesubs/horriblesubs_scraper');
const ytsScraper = require('./scrapers/yts/yts_scraper');
const eztvScraper = require('./scrapers/eztv/eztv_scraper');
const leetxScraper = require('./scrapers/1337x/1337x_scraper');
const kickassScraper = require('./scrapers/kickass/kickass_scraper');
const rarbgScraper = require('./scrapers/rarbg/rarbg_scraper');
@@ -17,6 +18,7 @@ const PROVIDERS = [
// require('./scrapers/thepiratebay/thepiratebay_update_size_scraper')
// require('./scrapers/1337x/1337x_dump_scraper')
ytsScraper,
eztvScraper,
horribleSubsScraper,
rarbgScraper,
thepiratebayScraper,

View File

@@ -0,0 +1,128 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
const Promises = require('../../lib/promises');
const { getRandomUserAgent } = require('./../../lib/request_helper');
// Mirror base URLs tried in parallel; the first successful response wins.
const defaultProxies = [
  'https://eztv.io'
];
// Per-request connection open timeout, in milliseconds.
const defaultTimeout = 30000;
// Number of torrents requested per API page (passed as the `limit` query param).
const limit = 100;
// Give up paginating a single imdb-id search after this many pages.
const maxPage = 5;
/**
 * Fetches and parses a single torrent episode page, racing all proxies.
 * Retries the whole request on any failure until `retries` is exhausted.
 * @param {string} torrentId - EZTV episode path id (the part after `/ep/`).
 * @param {object} [config] - request options (e.g. timeout).
 * @param {number} [retries] - remaining attempts before rejecting.
 * @returns {Promise<object>} parsed torrent enriched with `torrentId`.
 */
function torrent(torrentId, config = {}, retries = 2) {
  if (!torrentId || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentId} search`));
  }
  const pageRequests = defaultProxies
      .map(proxyUrl => singleRequest(`${proxyUrl}/ep/${torrentId}`, config));
  return Promises.first(pageRequests)
      .then(parseTorrentPage)
      .then(parsed => ({ torrentId, ...parsed }))
      .catch(() => torrent(torrentId, config, retries - 1));
}
/**
 * Searches EZTV's API by IMDb id, following pagination while full pages
 * keep coming back (up to `maxPage` pages). Retries on failure.
 * @param {string} imdbId - id in `tt1234567` form.
 * @param {object} [config] - request options; `config.page` resumes paging.
 * @param {number} [retries] - remaining attempts before rejecting.
 * @returns {Promise<Array<object>>} accumulated parsed torrents.
 */
function search(imdbId, config = {}, retries = 2) {
  if (!imdbId || retries === 0) {
    return Promise.reject(new Error(`Failed ${imdbId} search`));
  }
  const id = imdbId.replace('tt', '');
  const page = config.page || 1;
  const query = `limit=${limit}&page=${page}&imdb_id=${id}`;
  return Promises.first(defaultProxies
          .map(proxyUrl => singleRequest(`${proxyUrl}/api/get-torrents?${query}`, config)))
      .then(results => parseResults(results))
      .then(torrents => {
        const morePagesLikely = torrents.length === limit && page < maxPage;
        if (!morePagesLikely) {
          return torrents;
        }
        // Next-page failures are tolerated: keep what we already have.
        return search(imdbId, { ...config, page: page + 1 })
            .catch(() => [])
            .then(nextTorrents => torrents.concat(nextTorrents));
      })
      .catch(() => search(imdbId, config, retries - 1));
}
/**
 * Fetches one page of the latest torrents feed, racing all proxies.
 * Retries on failure until `retries` is exhausted.
 * @param {object} [config] - request options; `config.page` selects the page.
 * @param {number} [retries] - remaining attempts before rejecting.
 * @returns {Promise<Array<object>>} parsed torrents for the requested page.
 */
function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const page = config.page || 1;
  const feedRequests = defaultProxies
      .map(proxyUrl => singleRequest(`${proxyUrl}/api/get-torrents?limit=${limit}&page=${page}`, config));
  return Promises.first(feedRequests)
      .then(parseResults)
      .catch(() => browse(config, retries - 1));
}
/**
 * Performs a single GET request with a random user agent.
 * @param {string} requestUrl - absolute URL to fetch.
 * @param {object} [config] - `config.timeout` overrides the open timeout.
 * @returns {Promise<*>} the response body (needle parses JSON automatically
 *     when the server sends it; otherwise raw body/html).
 */
function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;
  const options = { userAgent: getRandomUserAgent(), open_timeout: timeout, follow: 2 };
  return needle('get', requestUrl, options)
      .then(response => {
        if (!response.body) {
          // Reject with a proper Error instance (was a bare string, which
          // loses the stack trace and breaks `instanceof Error` checks).
          return Promise.reject(new Error(`No body: ${requestUrl}`));
        }
        return response.body;
      });
}
/**
 * Validates an API response and maps its entries to internal torrent objects.
 * @param {object} results - parsed API payload; expects `results.torrents` array.
 * @returns {Array<object>|Promise<never>} mapped torrents, or a rejected
 *     promise when the payload has an unexpected shape.
 */
function parseResults(results) {
  if (!results || !Array.isArray(results.torrents)) {
    console.log('Incorrect results: ', results);
    // Reject with an Error instance (was a bare string) so downstream
    // handlers get a stack trace; also adds the missing semicolon.
    return Promise.reject(new Error('Incorrect results'));
  }
  return results.torrents.map(torrent => parseTorrent(torrent));
}
/**
 * Maps one raw EZTV API entry to the internal torrent shape.
 * @param {object} torrent - raw API entry (title, episode_url, hash, ...).
 * @returns {object} normalized torrent record.
 */
function parseTorrent(torrent) {
  return {
    // API titles carry a trailing "EZTV" marker; strip it.
    name: torrent.title.replace(/EZTV$/, ''),
    // e.g. "https://eztv.io/ep/12345/show-s01e01/" -> "12345/show-s01e01/"
    torrentId: torrent.episode_url.replace(/.*\/ep\//, ''),
    infoHash: torrent.hash.trim().toLowerCase(),
    magnetLink: torrent.magnet_url,
    torrentLink: torrent.torrent_url,
    seeders: torrent.seeds,
    size: torrent.size_bytes,
    uploadDate: new Date(torrent.date_released_unix * 1000),
    // '0' means "no IMDb id". Previously a missing/undefined id produced
    // the bogus string 'ttundefined' because `x !== '0' && 'tt' + x` is
    // truthy for undefined; guard on presence as well.
    imdbId: torrent.imdb_id && torrent.imdb_id !== '0' ? `tt${torrent.imdb_id}` : undefined
  };
}
/**
 * Parses an EZTV episode HTML page into a torrent record.
 * @param {string|Buffer} body - raw HTML of the `/ep/...` page.
 * @returns {Promise<object>} parsed torrent fields.
 */
function parseTorrentPage(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);
    if (!$) {
      reject(new Error('Failed loading body'));
      // Previously execution fell through after reject() and the selector
      // code below threw inside the executor; bail out explicitly.
      return;
    }
    const content = $('table[class="forum_header_border_normal"]');
    const torrent = {
      name: content.find('h1 > span').text().replace(/EZTV$/, ''),
      // The hash/size/date values are text nodes right after their <b> labels.
      infoHash: content.find('b:contains(\'Torrent Hash:\')')[0].nextSibling.data.trim().toLowerCase(),
      magnetLink: content.find('a[title="Magnet Link"]').attr('href'),
      torrentLink: content.find('a[title="Download Torrent"]').attr('href'),
      seeders: parseInt(content.find('span[class="stat_red"]').first().text(), 10) || 0,
      size: parseSize(content.find('b:contains(\'Filesize:\')')[0].nextSibling.data),
      uploadDate: moment(content.find('b:contains(\'Released:\')')[0].nextSibling.data, 'Do MMM YYYY').toDate(),
      showUrl: content.find('.episode_left_column a').attr('href')
    };
    resolve(torrent);
  });
}
/**
 * Converts a human-readable size string (e.g. "1.5 GB", "734.87 MB")
 * into a whole number of bytes.
 * @param {string} sizeText - size text, possibly with ',' or '\'' separators.
 * @returns {number|undefined} bytes, or undefined for empty input.
 */
function parseSize(sizeText) {
  if (!sizeText) {
    return undefined;
  }
  // Unit lookup in precedence order; unknown units fall back to bytes.
  const unitScales = [
    ['GB', 1024 * 1024 * 1024],
    ['MB', 1024 * 1024],
    ['KB', 1024],
    ['kB', 1024]
  ];
  const matched = unitScales.find(([unit]) => sizeText.includes(unit));
  const scale = matched ? matched[1] : 1;
  const amount = parseFloat(sizeText.replace(/[',]/g, ''));
  return Math.floor(amount * scale);
}
module.exports = { torrent, search, browse };

View File

@@ -0,0 +1,78 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const eztv = require('./eztv_api');
const { Type } = require('../../lib/types');
const Promises = require('../../lib/promises');
const repository = require('../../lib/repository');
const { updateCurrentSeeders } = require('../../lib/torrent');
const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
const NAME = 'EZTV';
// Stop paginating the latest-torrents feed after this many pages per run.
const UNTIL_PAGE = 10;
// Caps concurrent torrent-page / seeders requests against EZTV.
const limiter = new Bottleneck({ maxConcurrent: 20 });
/**
 * Runs a full EZTV scrape pass and, on success, persists the scrape
 * start time on the provider record.
 */
async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
  await scrapeLatestTorrents();
  lastScrape.lastScraped = scrapeStart;
  await lastScrape.save();
  console.log(`[${moment()}] finished ${NAME} scrape`);
}
/**
 * Refreshes a torrent's seeders count: tries the EZTV torrent page first,
 * falling back to a generic tracker scrape, then persists the result.
 * Note: mutates `torrent.seeders` on the EZTV path (as before).
 * @param {object} torrent - stored torrent record with `torrentId`.
 */
async function updateSeeders(torrent) {
  return limiter.schedule(() => eztv.torrent(torrent.torrentId)
      .then(record => {
        torrent.seeders = record.seeders;
        return torrent;
      })
      .catch(() => updateCurrentSeeders(torrent))
      .then(updated => updateTorrentSeeders(updated)));
}
/**
 * Kicks off latest-torrents scraping from the first page.
 * The previous call passed `1654` (a category id copy-pasted from another
 * scraper) into the `page` parameter, so scraping started at page 1654 and —
 * since 1654 >= UNTIL_PAGE — only that single page was ever fetched.
 */
async function scrapeLatestTorrents() {
  return scrapeLatestTorrentsForCategory();
}
/**
 * Scrapes one page of EZTV's latest-torrents feed and recurses onto the
 * next page until an empty page or UNTIL_PAGE is reached.
 * @param {number} [page] - 1-based feed page to fetch.
 */
async function scrapeLatestTorrentsForCategory(page = 1) {
  // Log message typo fixed ("Scrapping" -> "Scraping"); also dropped the
  // redundant double parentheses around the browse argument.
  console.log(`Scraping ${NAME} page ${page}`);
  return eztv.browse({ page })
      .catch(error => {
        console.warn(`Failed ${NAME} scraping for [${page}] due: `, error);
        // Best-effort: treat a failed page as empty so the run completes.
        // return Promises.delay(30000).then(() => scrapeLatestTorrentsForCategory(page))
        return Promise.resolve([]);
      })
      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
          ? scrapeLatestTorrentsForCategory(page + 1)
          : Promise.resolve());
}
/**
 * Persists one scraped torrent: skips/refreshes already-stored entries,
 * otherwise creates a new series entry.
 * @param {object} record - parsed torrent from the EZTV API.
 * @returns {Promise<object|string>} created torrent, update result, or an
 *     'Invalid torrent record' marker for unusable records.
 */
async function processTorrentRecord(record) {
  // Validate first: previously `record` was passed to getStoredTorrentEntry
  // BEFORE the null/size guard, dereferencing a possibly-null record.
  if (!record || !record.size) {
    return 'Invalid torrent record';
  }
  if (await getStoredTorrentEntry(record)) {
    return updateTorrentSeeders(record);
  }
  const torrent = {
    infoHash: record.infoHash,
    provider: NAME,
    torrentId: record.torrentId,
    // Collapse tabs/repeated whitespace in titles.
    title: record.name.replace(/\t|\s+/g, ' ').trim(),
    type: Type.SERIES,
    size: record.size,
    seeders: record.seeders,
    uploadDate: record.uploadDate,
    imdbId: record.imdbId,
  };
  return createTorrentEntry(torrent).then(() => torrent);
}
module.exports = { scrape, updateSeeders, NAME };