Updated scrapers to latest available commit

This commit is contained in:
Gabisonfire
2024-01-17 16:43:58 -05:00
parent cab7f38c66
commit 909ade0d8e
39 changed files with 311 additions and 299 deletions

View File

@@ -1,9 +1,10 @@
const axios = require('axios');
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
const decode = require('magnet-uri');
const Promises = require('../../lib/promises');
const { getRandomUserAgent } = require('../../lib/requestHelper');
const { parseSize } = require("../scraperHelper");
const defaultProxies = [
// 'https://torrentgalaxy.to',
@@ -34,7 +35,7 @@ function torrent(torrentId, config = {}, retries = 2) {
const proxyList = config.proxyList || defaultProxies;
return Promises.first(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`)))
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`)))
.then((body) => parseTorrentPage(body))
.then((torrent) => ({ torrentId, ...torrent }))
.catch((err) => torrent(torrentId, config, retries - 1));
@@ -49,7 +50,7 @@ function search(keyword, config = {}, retries = 2) {
const category = config.category;
return Promises.first(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrents.php?cat=${category}&page=${page - 1}&search=${keyword}`)))
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrents.php?cat=${category}&page=${page - 1}&search=${keyword}`)))
.then((body) => parseTableBody(body))
.catch(() => search(keyword, config, retries - 1));
}
@@ -63,19 +64,19 @@ function browse(config = {}, retries = 2, error = null) {
const category = config.category;
return Promises.first(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrents.php?cat=${category}&page=${page - 1}`)))
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrents.php?cat=${category}&page=${page - 1}`)))
.then((body) => parseTableBody(body))
.catch((err) => browse(config, retries - 1, err));
}
function singleRequest(requestUrl) {
const options = { userAgent: getRandomUserAgent(), open_timeout: defaultTimeout, follow: 2 };
const options = { headers: { 'User-Agent': getRandomUserAgent() }, timeout: defaultTimeout };
return needle('get', requestUrl, options)
return axios.get(requestUrl, options)
.then((response) => {
const body = response.body;
const body = response.data;
if (!body) {
throw new Error(`No body: ${requestUrl} with status ${response.statusCode}`);
throw new Error(`No body: ${requestUrl} with status ${response.status}`);
} else if (body.includes('Access Denied')) {
console.log(`Access Denied: ${requestUrl}`);
throw new Error(`Access Denied: ${requestUrl}`);
@@ -157,21 +158,6 @@ function parseTorrentPage(body) {
});
}
/**
 * Convert a human-readable size string (e.g. "1.5 GB", "700 MB", "512 KB")
 * into a whole number of bytes.
 * @param {string} sizeText - size label, possibly containing ',' or '\'' thousand separators
 * @returns {number|undefined} byte count floored to an integer, or undefined for falsy input
 */
function parseSize(sizeText) {
  if (!sizeText) {
    return undefined;
  }
  // Unit table checked in order of precedence, mirroring GB > MB > KB/kB.
  const unitScales = [
    ['GB', 1024 ** 3],
    ['MB', 1024 ** 2],
    ['KB', 1024],
    ['kB', 1024],
  ];
  const matched = unitScales.find(([unit]) => sizeText.includes(unit));
  const scale = matched ? matched[1] : 1;
  // Strip apostrophe/comma thousand separators before parsing the number.
  const numericValue = parseFloat(sizeText.replace(/[',]/g, ''));
  return Math.floor(numericValue * scale);
}
function parseDate(dateString) {
if (dateString.includes('ago')) {
const amount = parseInt(dateString, 10);