Change FlareSolverr mechanism to reuse the solved cookies after the initial FlareSolverr request

This commit is contained in:
Cedric
2024-01-31 14:19:46 -06:00
parent 07d4d020e6
commit ee34b6db17

View File

@@ -10,9 +10,12 @@ const { parseSize } = require("../scraperHelper");
// Mirror/proxy hosts tried (in order) for every request.
const defaultProxies = [
    'https://1337x.to'
];
// Request timeout in ms. Generous because the first request per process is
// solved through FlareSolverr, which can take tens of seconds.
const defaultTimeout = 50000;
// Upper bound on how many result pages a single search may walk.
const maxSearchPage = 50;
// FlareSolverr session data captured from the first solved request and
// reused to send direct (non-FlareSolverr) requests afterwards.
let flaresolverrUserAgent = '';
let flaresolverrCookies = '';
const Categories = {
MOVIE: 'Movies',
TV: 'TV',
@@ -33,10 +36,10 @@ function torrent(torrentId, config = {}, retries = 2) {
const slug = torrentId.startsWith('/torrent/') ? torrentId.replace('/torrent/', '') : torrentId;
return Promises.first(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
.then((body) => parseTorrentPage(body))
.then((torrent) => ({ torrentId: slug, ...torrent }))
.catch((err) => torrent(slug, config, retries - 1));
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
.then((body) => parseTorrentPage(body))
.then((torrent) => ({ torrentId: slug, ...torrent }))
.catch((err) => torrent(slug, config, retries - 1));
}
function search(keyword, config = {}, retries = 2) {
@@ -48,17 +51,17 @@ function search(keyword, config = {}, retries = 2) {
const category = config.category;
const extendToPage = Math.min(maxSearchPage, (config.extendToPage || 1))
const requestUrl = proxyUrl => category
? `${proxyUrl}/category-search/${keyword}/${category}/${page}/`
: `${proxyUrl}/search/${keyword}/${page}/`;
? `${proxyUrl}/category-search/${keyword}/${category}/${page}/`
: `${proxyUrl}/search/${keyword}/${page}/`;
return Promises.first(proxyList
.map(proxyUrl => singleRequest(requestUrl(proxyUrl), config)))
.then(body => parseTableBody(body))
.then(torrents => torrents.length === 40 && page < extendToPage
? search(keyword, { ...config, page: page + 1 }).catch(() => [])
.then(nextTorrents => torrents.concat(nextTorrents))
: torrents)
.catch((err) => search(keyword, config, retries - 1));
.map(proxyUrl => singleRequest(requestUrl(proxyUrl), config)))
.then(body => parseTableBody(body))
.then(torrents => torrents.length === 40 && page < extendToPage
? search(keyword, { ...config, page: page + 1 }).catch(() => [])
.then(nextTorrents => torrents.concat(nextTorrents))
: torrents)
.catch((err) => search(keyword, config, retries - 1));
}
function browse(config = {}, retries = 2) {
@@ -70,38 +73,66 @@ function browse(config = {}, retries = 2) {
const category = config.category;
const sort = config.sort;
const requestUrl = proxyUrl => sort
? `${proxyUrl}/sort-cat/${category}/${sort}/desc/${page}/`
: `${proxyUrl}/cat/${category}/${page}/`;
? `${proxyUrl}/sort-cat/${category}/${sort}/desc/${page}/`
: `${proxyUrl}/cat/${category}/${page}/`;
return Promises.first(proxyList
.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config)))
.then((body) => parseTableBody(body))
.catch((err) => browse(config, retries - 1));
.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config)))
.then((body) => parseTableBody(body))
.catch((err) => {
console.error(err);
browse(config, retries - 1);
});
}
/**
 * Fetch the page body for requestUrl.
 *
 * On the first call (no cached FlareSolverr session data) the request is
 * proxied through FlareSolverr to clear Cloudflare; the solved User-Agent
 * and cookies are cached in module state so subsequent calls can hit the
 * site directly with plain GET requests.
 *
 * @param {string} requestUrl - absolute URL of the page to fetch
 * @param {object} [config] - optional settings; only `timeout` (ms) is read
 * @returns {Promise<string>} resolves with the HTML body
 * @throws rejects when FlareSolverr reports an error, the body is empty,
 *         or the body looks like an error/interstitial page
 */
function singleRequest(requestUrl, config = {}) {
    const timeout = config.timeout || defaultTimeout;
    let options = { headers: { 'User-Agent': getRandomUserAgent() }, timeout: timeout };
    // NOTE(review): if FlareSolverr ever returns zero cookies, the cache stays
    // empty and every request keeps going through FlareSolverr — confirm intended.
    if (flaresolverrUserAgent === '' || flaresolverrCookies === '') {
        console.log("using flaresolverr");
        return axios.post('http://flaresolverr:8191/v1', {
            cmd: 'request.get',
            url: requestUrl,
        }, options)
            .then((response) => {
                if (response.data.status !== 'ok') {
                    throw new Error(`FlareSolverr did not return status 'ok': ${response.data.message}`)
                }
                const body = response.data.solution.response;
                if (!body) {
                    throw new Error(`No body: ${requestUrl}`);
                } else if (body.includes('502: Bad gateway') ||
                        body.includes('403 Forbidden') ||
                        !(body.includes('1337x</title>'))) {
                    throw new Error(`Invalid body contents: ${requestUrl}`);
                }
                // Cache the solved session so later requests bypass FlareSolverr.
                flaresolverrUserAgent = response.data.solution.userAgent;
                response.data.solution.cookies.forEach(cookie => {
                    flaresolverrCookies = flaresolverrCookies + `${cookie.name}=${cookie.value}; `;
                });
                return body;
            });
    }
    else {
        console.log("using direct request");
        // Replay the solved session: same User-Agent plus the cf cookies.
        options.headers['User-Agent'] = flaresolverrUserAgent;
        options.headers['Cookie'] = flaresolverrCookies;
        return axios.get(requestUrl, options)
            .then((response) => {
                const body = response.data;
                if (!body) {
                    throw new Error(`No body: ${requestUrl}`);
                } else if (body.includes('502: Bad gateway') ||
                        body.includes('403 Forbidden') ||
                        !(body.includes('1337x</title>'))) {
                    throw new Error(`Invalid body contents: ${requestUrl}`);
                }
                return body;
            });
    }
}
function parseTableBody(body) {
@@ -153,13 +184,13 @@ function parseTorrentPage(body) {
uploadDate: parseDate(details.find('strong:contains(\'Date uploaded\')').next().text()),
imdbId: imdbIdMatch && imdbIdMatch[1],
files: details.find('div[id=\'files\']').first().find('li')
.map((i, elem) => $(elem).text())
.map((i, text) => ({
fileIndex: i,
name: text.match(/^(.+)\s\(.+\)$/)[1].replace(/^.+\//g, ''),
path: text.match(/^(.+)\s\(.+\)$/)[1],
size: parseSize(text.match(/^.+\s\((.+)\)$/)[1])
})).get()
.map((i, elem) => $(elem).text())
.map((i, text) => ({
fileIndex: i,
name: text.match(/^(.+)\s\(.+\)$/)[1].replace(/^.+\//g, ''),
path: text.match(/^(.+)\s\(.+\)$/)[1],
size: parseSize(text.match(/^.+\s\((.+)\)$/)[1])
})).get()
};
resolve(torrent);
});