moves scraper to a subpackage

TheBeastLT
2020-03-10 15:12:18 +01:00
parent dff66d8fb2
commit 58aba322c2
30 changed files with 50 additions and 28 deletions

@@ -0,0 +1,174 @@
const cheerio = require('cheerio');
const needle = require('needle');
const Sugar = require('sugar-date');
const decode = require('magnet-uri');
const defaultProxies = [
'https://1337x.to'
];
const defaultTimeout = 10000;
const Categories = {
MOVIE: 'Movies',
TV: 'TV',
ANIME: 'Anime',
DOCUMENTARIES: 'Documentaries',
APPS: 'Apps',
GAMES: 'Games',
MUSIC: 'Music',
PORN: 'XXX',
OTHER: 'Other',
};
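// Fetches a single torrent detail page by its id/slug, racing all configured
// proxy mirrors and retrying on failure until `retries` attempts are used up.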
function torrent(torrentId, config = {}, retries = 2) {
if (!torrentId || retries === 0) {
return Promise.reject(new Error(`Failed ${torrentId} query`));
}
const proxyList = config.proxyList || defaultProxies;
const slug = torrentId.startsWith('/torrent/') ? torrentId.replace('/torrent/', '') : torrentId;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
.then((body) => parseTorrentPage(body))
.then((torrent) => ({ torrentId: slug, ...torrent }))
.catch((err) => torrent(slug, config, retries - 1));
}
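// Searches the site for a keyword on the given results page, racing all proxy mirrors.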
function search(keyword, config = {}, retries = 2) {
if (!keyword || retries === 0) {
return Promise.reject(new Error(`Failed ${keyword} search`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 1;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/`, config)))
.then((body) => parseTableBody(body))
.catch((err) => search(keyword, config, retries - 1));
}
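// Lists the torrents of a category listing page, racing all proxy mirrors.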
function browse(config = {}, retries = 2) {
if (retries === 0) {
return Promise.reject(new Error(`Failed browse request`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 1;
const category = config.category;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/cat/${category}/${page}/`, config)))
.then((body) => parseTableBody(body))
.catch((err) => browse(config, retries - 1));
}
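// Issues a single GET request and resolves with the response body, rejecting on
// empty bodies, proxy error pages and responses that are not 1337x pages.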
function singleRequest(requestUrl, config = {}) {
const timeout = config.timeout || defaultTimeout;
return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
.then((response) => {
const body = response.body;
if (!body) {
throw new Error(`No body: ${requestUrl}`);
} else if (body.includes('502: Bad gateway') ||
body.includes('403 Forbidden') ||
!(body.includes('1337x</title>'))) {
throw new Error(`Invalid body contents: ${requestUrl}`);
}
return body;
});
}
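// Parses a search/browse results table into { name, torrentId, seeders, leechers, size } entries.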
function parseTableBody(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const torrents = [];
$('.table > tbody > tr').each((i, element) => {
const row = $(element);
torrents.push({
name: row.find('a').eq(1).text(),
torrentId: row.find('a').eq(1).attr('href').replace('/torrent/', ''),
        seeders: parseInt(row.children('td.coll-2').text(), 10),
        leechers: parseInt(row.children('td.coll-3').text(), 10),
size: parseSize(row.children('td.coll-4').text())
});
});
resolve(torrents);
});
}
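// Parses a torrent detail page into a torrent object: magnet link, info hash, stats,
// category, language, upload date, optional IMDb id and the contained file list.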
function parseTorrentPage(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const details = $('.torrent-detail-page');
const magnetLink = details.find('a:contains(\'Magnet Download\')').attr('href');
const imdbIdMatch = details.find('div[id=\'description\']').html().match(/imdb\.com\/title\/tt(\d+)/i);
const torrent = {
name: decode(magnetLink).name.replace(/\+/g, ' '),
infoHash: decode(magnetLink).infoHash,
magnetLink: magnetLink,
seeders: parseInt(details.find('strong:contains(\'Seeders\')').next().text(), 10),
leechers: parseInt(details.find('strong:contains(\'Leechers\')').next().text(), 10),
category: details.find('strong:contains(\'Category\')').next().text(),
language: details.find('strong:contains(\'Language\')').next().text(),
size: parseSize(details.find('strong:contains(\'Total size\')').next().text()),
uploadDate: Sugar.Date.create(details.find('strong:contains(\'Date uploaded\')').next().text()),
imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`,
files: details.find('div[id=\'files\']').first().find('li')
.map((i, elem) => $(elem).text())
.map((i, text) => ({
fileIndex: i,
name: text.match(/^(.+)\s\(.+\)$/)[1].replace(/^.+\//g, ''),
path: text.match(/^(.+)\s\(.+\)$/)[1],
size: parseSize(text.match(/^.+\s\((.+)\)$/)[1])
})).get()
};
resolve(torrent);
});
}
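// Converts a human readable size string such as '1.4 GB' into bytes.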
function parseSize(sizeText) {
if (!sizeText) {
return undefined;
}
let scale = 1;
if (sizeText.includes('GB')) {
    scale = 1024 * 1024 * 1024;
} else if (sizeText.includes('MB')) {
scale = 1024 * 1024;
} else if (sizeText.includes('KB')) {
scale = 1024;
}
return Math.floor(parseFloat(sizeText) * scale);
}
function raceFirstSuccessful(promises) {
return Promise.all(promises.map((p) => {
// If a request fails, count that as a resolution so it will keep
// waiting for other possible successes. If a request succeeds,
// treat it as a rejection so Promise.all immediately bails out.
return p.then(
(val) => Promise.reject(val),
(err) => Promise.resolve(err)
);
})).then(
// If '.all' resolved, we've just got an array of errors.
(errors) => Promise.reject(errors),
// If '.all' rejected, we've got the result we wanted.
(val) => Promise.resolve(val)
);
}
module.exports = { torrent, search, browse, Categories };
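
A minimal usage sketch of the exported API. This is illustrative only: the keyword and the logging are assumptions, while the require path mirrors the one used by the scraper module below.

// Hypothetical example of consuming the 1337x API module.
const leetx = require('./1337x_api');

leetx.search('ubuntu')
    .then((results) => leetx.torrent(results[0].torrentId))
    .then((torrent) => console.log(torrent.name, torrent.infoHash))
    .catch((error) => console.error(error));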

@@ -0,0 +1,89 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const leetx = require('./1337x_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
createTorrentEntry,
createSkipTorrentEntry,
getStoredTorrentEntry,
updateTorrentSeeders
} = require('../../lib/torrentEntries');
const NAME = '1337x';
const UNTIL_PAGE = 1;
const TYPE_MAPPING = typeMapping();
const limiter = new Bottleneck({ maxConcurrent: 40 });
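// Fetches the latest torrents from all allowed categories, processes them through the
// rate limiter and records the scrape time and last torrent id on the provider entry.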
async function scrape() {
const scrapeStart = moment();
const lastScrape = await repository.getProvider({ name: NAME });
console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
const latestTorrents = await getLatestTorrents();
return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
.then(() => {
lastScrape.lastScraped = scrapeStart;
lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
return repository.updateProvider(lastScrape);
})
.then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}
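// Collects the latest torrents from every allowed category into a single flat list.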
async function getLatestTorrents() {
const allowedCategories = [
leetx.Categories.MOVIE,
leetx.Categories.TV,
leetx.Categories.ANIME,
leetx.Categories.DOCUMENTARIES
];
return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
.then(entries => entries.reduce((a, b) => a.concat(b), []));
}
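// Browses a single category page by page until UNTIL_PAGE is reached or no more results are returned.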
async function getLatestTorrentsForCategory(category, page = 1) {
return leetx.browse(({ category: category, page: page }))
.then(torrents => torrents.length && page < UNTIL_PAGE
          ? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
: torrents)
.catch(() => []);
}
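// Updates the seeders count for torrents that are already stored, creates a skip entry
// for torrents that cannot be resolved or have an unsupported category, and creates a
// full torrent entry otherwise.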
async function processTorrentRecord(record) {
if (await getStoredTorrentEntry(record)) {
return updateTorrentSeeders(record);
}
const torrentFound = await leetx.torrent(record.torrentId).catch(() => undefined);
if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
return createSkipTorrentEntry(record);
}
const torrent = {
infoHash: torrentFound.infoHash,
provider: NAME,
torrentId: torrentFound.torrentId,
title: torrentFound.name.replace(/\t|\s+/g, ' '),
seeders: torrentFound.seeders,
size: torrentFound.size,
type: TYPE_MAPPING[torrentFound.category],
uploadDate: torrentFound.uploadDate,
imdbId: torrentFound.imdbId,
};
return createTorrentEntry(torrent);
}
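// Maps 1337x categories to the internal content types.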
function typeMapping() {
const mapping = {};
mapping[leetx.Categories.MOVIE] = Type.MOVIE;
mapping[leetx.Categories.DOCUMENTARIES] = Type.MOVIE;
mapping[leetx.Categories.TV] = Type.SERIES;
mapping[leetx.Categories.ANIME] = Type.ANIME;
return mapping;
}
module.exports = { scrape };
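
A minimal sketch of how the exported scrape job might be triggered. The module path and the interval wiring are assumptions for illustration; the actual scheduling lives elsewhere in the repo.

// Hypothetical wiring; assumes the scraper module's file name in its new subpackage.
const { scrape } = require('./1337x_scraper');

// run once on startup, then repeat every four hours
scrape().catch((error) => console.error('1337x scrape failed', error));
setInterval(() => scrape().catch((error) => console.error('1337x scrape failed', error)), 4 * 60 * 60 * 1000);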