Updated scrapers to latest available commit

Author: Gabisonfire
Date: 2024-01-17 16:43:58 -05:00
parent cab7f38c66
commit 909ade0d8e
39 changed files with 311 additions and 299 deletions


@@ -1,8 +1,10 @@
+const axios = require('axios');
 const cheerio = require('cheerio');
-const needle = require('needle');
 const moment = require('moment');
+const { decode } = require("magnet-uri");
 const Promises = require('../../lib/promises');
 const { getRandomUserAgent } = require('./../../lib/requestHelper');
+const { parseSize } = require("../scraperHelper");
 const defaultProxies = [
   'https://eztv.re'
@@ -19,7 +21,7 @@ function torrent(torrentId, config = {}, retries = 1) {
   }
   return Promises.first(defaultProxies
-      .map(proxyUrl => singleRequest(`${proxyUrl}/ep/${torrentId}`, config)))
+      .map(proxyUrl => singleRequest(`${proxyUrl}/ep/${torrentId}`, config)))
       .then(body => parseTorrentPage(body))
       .then(torrent => ({ torrentId, ...torrent }))
       .catch(error => retries ? jitter().then(() => torrent(torrentId, config, retries - 1)) : Promise.reject(error));
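Promises.first comes from the project's ../../lib/promises helper, which this diff doesn't touch; from its use here it evidently resolves with whichever proxy request succeeds first and rejects only when every proxy fails. A minimal sketch of that contract (a hypothetical stand-in, not the project's actual implementation):

    // Hypothetical stand-in for lib/promises' first(): settle with the first
    // fulfilled promise; reject only once every promise has failed.
    function first(promises) {
      return new Promise((resolve, reject) => {
        let pending = promises.length;
        promises.forEach(promise => Promise.resolve(promise)
            .then(resolve)
            .catch(error => { if (--pending === 0) reject(error); }));
      });
    }
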
@@ -33,7 +35,7 @@ function search(imdbId, config = {}, retries = 1) {
   const page = config.page || 1;
   return Promises.first(defaultProxies
-      .map(proxyUrl => singleRequest(`${proxyUrl}/api/get-torrents?limit=${limit}&page=${page}&imdb_id=${id}`, config)))
+      .map(proxyUrl => singleRequest(`${proxyUrl}/api/get-torrents?limit=${limit}&page=${page}&imdb_id=${id}`, config)))
       .then(results => parseResults(results))
       .then(torrents => torrents.length === limit && page < maxPage
           ? search(imdbId, { ...config, page: page + 1 }).catch(() => [])
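search() pages through the EZTV API recursively: while a response comes back with exactly limit torrents and page is below maxPage, it fetches the next page; the hunk is cut off by the diff boundary before the branch that returns and concatenates the accumulated results. The implied pattern as a self-contained sketch (fetchPage is a hypothetical callback, and the concatenation step is an assumption, since the diff truncates before it):

    // Hypothetical sketch of the recursive pagination used above: keep
    // fetching while pages come back full, flattening results as we go.
    async function fetchAllPages(fetchPage, limit, maxPage, page = 1) {
      const results = await fetchPage(page);
      if (results.length < limit || page >= maxPage) {
        return results;                      // short page: nothing left to fetch
      }
      const rest = await fetchAllPages(fetchPage, limit, maxPage, page + 1)
          .catch(() => []);                  // a failed deeper page keeps what we have
      return results.concat(rest);
    }
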
@@ -46,27 +48,21 @@ function browse(config = {}, retries = 1) {
   const page = config.page || 1;
   return Promises.first(defaultProxies
-      .map(proxyUrl => singleRequest(`${proxyUrl}/api/get-torrents?limit=${limit}&page=${page}`, config)))
+      .map(proxyUrl => singleRequest(`${proxyUrl}/api/get-torrents?limit=${limit}&page=${page}`, config)))
       .then(results => parseResults(results))
       .catch(error => retries ? jitter().then(() => browse(config, retries - 1)) : Promise.reject(error));
 }
 
 function singleRequest(requestUrl, config = {}) {
   const timeout = config.timeout || defaultTimeout;
-  const options = {
-    userAgent: getRandomUserAgent(),
-    open_timeout: timeout,
-    response_timeout: timeout,
-    read_timeout: timeout,
-    follow: 2
-  };
-
-  return needle('get', requestUrl, options)
+  const options = { headers: { 'User-Agent': getRandomUserAgent() }, timeout: timeout };
+
+  return axios.get(requestUrl, options)
       .then(response => {
-        if (!response.body) {
+        if (!response.data) {
          return Promise.reject(`No body: ${requestUrl}`);
         }
-        return Promise.resolve(response.body);
+        return Promise.resolve(response.data);
       });
 }
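This is the core of the migration: needle's three separate timeouts (open_timeout, response_timeout, read_timeout) collapse into axios's single timeout, the user agent moves into an explicit headers object, and the parsed payload is read from response.data instead of needle's response.body. Two behavioural differences worth noting: axios rejects the promise on non-2xx status codes by default, where needle resolves with the error response, and needle's follow: 2 redirect cap is replaced by axios's default of following up to 5 redirects. A minimal equivalent call, using only the public axios API:

    const axios = require('axios');

    // Same request shape as the new singleRequest: one timeout for the whole
    // request; the payload arrives on response.data (needle used response.body).
    function fetchBody(requestUrl, timeout) {
      return axios.get(requestUrl, { headers: { 'User-Agent': 'Mozilla/5.0' }, timeout })
          .then(response => response.data);
    }
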
@@ -99,10 +95,11 @@ function parseTorrentPage(body) {
       reject(new Error('Failed loading body'));
     }
     const content = $('table[class="forum_header_border_normal"]');
+    const magnetLink = content.find('a[title="Magnet Link"]').attr('href');
     const torrent = {
       name: content.find('h1 > span').text().replace(/EZTV$/, ''),
-      infoHash: content.find('b:contains(\'Torrent Hash:\')')[0].nextSibling.data.trim().toLowerCase(),
-      magnetLink: content.find('a[title="Magnet Link"]').attr('href'),
+      infoHash: decode(magnetLink).infoHash,
+      magnetLink: magnetLink,
       torrentLink: content.find('a[title="Download Torrent"]').attr('href'),
       seeders: parseInt(content.find('span[class="stat_red"]').first().text(), 10) || 0,
       size: parseSize(content.find('b:contains(\'Filesize:\')')[0].nextSibling.data),
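The info hash is now derived from the magnet link rather than scraped out of the 'Torrent Hash:' text node, which removes the fragile [0].nextSibling.data traversal. magnet-uri's decode() parses a magnet URI into its components and normalizes the btih hash to lowercase hex, which is why the old .trim().toLowerCase() disappears. For example:

    const { decode } = require('magnet-uri');

    const magnetLink = 'magnet:?xt=urn:btih:C12FE1C06BBA254A9DC9F519B335AA7C1367A88A&dn=Example';
    console.log(decode(magnetLink).infoHash);
    // -> 'c12fe1c06bba254a9dc9f519b335aa7c1367a88a'

One caveat: the new code assumes the magnet link is always present; if attr('href') ever returned undefined, decode(magnetLink) would throw rather than yield an empty result.
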
@@ -113,21 +110,6 @@ function parseTorrentPage(body) {
   });
 }
 
-function parseSize(sizeText) {
-  if (!sizeText) {
-    return undefined;
-  }
-  let scale = 1;
-  if (sizeText.includes('GB')) {
-    scale = 1024 * 1024 * 1024
-  } else if (sizeText.includes('MB')) {
-    scale = 1024 * 1024;
-  } else if (sizeText.includes('KB') || sizeText.includes('kB')) {
-    scale = 1024;
-  }
-  return Math.floor(parseFloat(sizeText.replace(/[',]/g, '')) * scale);
-}
 
 function jitter() {
   return Promises.delay(minDelay + Math.round(Math.random() * jitterDelay))
 }
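
The file-local parseSize is deleted in favour of the shared helper imported from ../scraperHelper at the top of the diff; its implementation isn't shown here, so assuming it keeps the removed function's semantics, it converts a human-readable size string to bytes using binary units and floors the result:

    // Expected behaviour, assuming the shared helper matches the removed code:
    parseSize('1.5 GB');   // 1610612736 (1.5 * 1024^3, floored)
    parseSize('350 MB');   // 367001600  (350 * 1024^2)
    parseSize('1,024 KB'); // 1048576    (comma stripped before parseFloat)
    parseSize(undefined);  // undefined

The unchanged jitter() above simply delays a retry by minDelay plus a random slice of jitterDelay, which is what the retry paths in this file call before re-entering torrent(), search(), or browse().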