Updated scrapers to latest available commit

Gabisonfire committed on 2024-01-17 16:43:58 -05:00
parent cab7f38c66
commit 909ade0d8e
39 changed files with 311 additions and 299 deletions


@@ -1,8 +1,7 @@
-const needle = require("needle")
+const axios = require('axios');
 const moment = require("moment")
 const cheerio = require("cheerio");
 const decode = require('magnet-uri');
 const Promises = require('../../lib/promises');
 const { escapeHTML } = require('../../lib/metadata');
 const { getRandomUserAgent } = require('../../lib/requestHelper');
 const { isPtDubbed, sanitizePtName, sanitizePtLanguages, sanitizePtOriginalName } = require('../scraperHelper')
@@ -63,11 +62,11 @@ function browse(config = {}, retries = 2) {
 function singleRequest(requestUrl, config = {}) {
   const timeout = config.timeout || defaultTimeout;
-  const options = { userAgent: getRandomUserAgent(), open_timeout: timeout, follow: 2 };
-  return needle('get', requestUrl, options)
+  const options = { headers: { 'User-Agent': getRandomUserAgent() }, timeout: timeout };
+  return axios.get(requestUrl, options)
       .then((response) => {
-        const body = response.body;
+        const body = response.data;
         if (!body) {
           throw new Error(`No body: ${requestUrl}`);
         } else if (body.includes('502: Bad gateway') ||
@@ -75,7 +74,8 @@ function singleRequest(requestUrl, config = {}) {
           throw new Error(`Invalid body contents: ${requestUrl}`);
         }
         return body;
-      });
+      })
+      .catch(error => Promise.reject(error.message || error));
 }
function parseTableBody(body) {
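
For reference, a minimal standalone sketch of the new request path, showing how the needle options map onto axios. The defaultTimeout value and the getRandomUserAgent body below are placeholder assumptions standing in for the repo's lib/requestHelper, which this diff does not show:

const axios = require('axios');

const defaultTimeout = 10000; // assumed value; the repo defines its own default

function getRandomUserAgent() {
  return 'Mozilla/5.0 (placeholder)'; // stand-in; the real helper rotates user agents
}

function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;
  // needle's { userAgent, open_timeout } becomes axios's { headers, timeout };
  // needle's follow: 2 has no counterpart here, so axios's default redirect
  // limit (5) now applies.
  const options = { headers: { 'User-Agent': getRandomUserAgent() }, timeout: timeout };
  return axios.get(requestUrl, options)
      .then((response) => {
        // axios exposes the parsed payload as response.data, not response.body
        const body = response.data;
        if (!body) {
          throw new Error(`No body: ${requestUrl}`);
        }
        return body;
      })
      // normalize rejections to a plain message string for callers
      .catch(error => Promise.reject(error.message || error));
}

One behavioral difference worth noting: axios rejects on non-2xx statuses by default, so an actual 502 response now surfaces through the .catch handler rather than the '502: Bad gateway' body check.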
@@ -141,8 +141,10 @@ function parseOriginalName(originalNameElem) {
   if (!originalNameElem[0]) {
     return '';
   }
-  const originalName = originalNameElem.next().text().trim() || originalNameElem[0].nextSibling.nodeValue;
-  return originalName.replace(/: ?/, '');
+  const originalName = originalNameElem.next().text()
+      || originalNameElem[0].nextSibling.nodeValue
+      || originalNameElem.text();
+  return originalName.replace(/[^:]*: ?/, '').trim();
 }
function parseCategory(categorys) {
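
The parseOriginalName rewrite above adds a third fallback, originalNameElem.text(), which can include the label itself, so the regex is widened from /: ?/ to /[^:]*: ?/ to strip everything up to and including the first colon. A small illustration, with hypothetical sample strings:

const strip = (s) => s.replace(/[^:]*: ?/, '').trim();

// The raw text node after the label element typically starts with ": <name>"
console.log(strip(': Some Original Name'));                 // "Some Original Name"

// The element's own text can include the label itself
console.log(strip('Título Original: Some Original Name'));  // "Some Original Name"

// The old /: ?/ pattern only deleted the first ": " match, so the second
// case would have produced "Título OriginalSome Original Name"

Moving .trim() to the end also covers the nextSibling.nodeValue branch, which the old code returned untrimmed.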