adds 1337x and tpb latest scrapers

TheBeastLT
2020-03-08 00:58:06 +01:00
parent 51eb21f112
commit 853c21472a
9 changed files with 457 additions and 432 deletions

scrapers/1337x/1337x_api.js Normal file

@@ -0,0 +1,174 @@
const cheerio = require('cheerio');
const needle = require('needle');
const Sugar = require('sugar-date');
const decode = require('magnet-uri');

const defaultProxies = [
  'https://1337x.to'
];
const defaultTimeout = 30000;

const Categories = {
  MOVIE: 'Movies',
  TV: 'TV',
  ANIME: 'Anime',
  DOCUMENTARIES: 'Documentaries',
  APPS: 'Apps',
  GAMES: 'Games',
  MUSIC: 'Music',
  PORN: 'XXX',
  OTHER: 'Other',
};
// Fetches and parses a single torrent detail page, retrying across proxies on failure.
function torrent(torrentSlug, config = {}, retries = 2) {
  if (!torrentSlug || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentSlug} query`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const slug = torrentSlug.startsWith('/torrent/') ? torrentSlug.replace('/torrent/', '') : torrentSlug;
  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
      .then((body) => parseTorrentPage(body))
      .then((torrent) => ({ torrentId: slug, ...torrent }))
      .catch((err) => torrent(slug, config, retries - 1));
}
function search(keyword, config = {}, retries = 2) {
  if (!keyword || retries === 0) {
    return Promise.reject(new Error(`Failed ${keyword} search`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;
  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/`, config)))
      .then((body) => parseTableBody(body))
      .catch((err) => search(keyword, config, retries - 1));
}
function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;
  const category = config.category || 0;
  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/cat/${category}/${page}/`, config)))
      .then((body) => parseTableBody(body))
      .catch((err) => browse(config, retries - 1));
}
// Requests one proxy and verifies the response looks like a genuine 1337x page.
function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;
  return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
      .then((response) => {
        const body = response.body;
        if (!body) {
          throw new Error(`No body: ${requestUrl}`);
        } else if (body.includes('502: Bad gateway') ||
            body.includes('403 Forbidden') ||
            !body.includes('1337x</title>')) {
          throw new Error(`Invalid body contents: ${requestUrl}`);
        }
        return body;
      });
}
function parseTableBody(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);
    if (!$) {
      return reject(new Error('Failed loading body'));
    }
    const torrents = [];
    $('.table > tbody > tr').each((i, element) => {
      const row = $(element);
      torrents.push({
        name: row.find('a').eq(1).text(),
        slug: row.find('a').eq(1).attr('href').replace('/torrent/', ''),
        seeders: parseInt(row.children('td.coll-2').text(), 10),
        leechers: parseInt(row.children('td.coll-3').text(), 10),
        size: parseSize(row.children('td.coll-4').text())
      });
    });
    resolve(torrents);
  });
}
function parseTorrentPage(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);
    if (!$) {
      return reject(new Error('Failed loading body'));
    }
    const details = $('.torrent-detail-page');
    const magnetLink = details.find('a:contains(\'Magnet Download\')').attr('href');
    const imdbIdMatch = details.find('div[id=\'description\']').html().match(/imdb\.com\/title\/tt(\d+)/i);
    const torrent = {
      name: decode(magnetLink).dn,
      magnetLink: magnetLink,
      infoHash: details.find('strong:contains(\'Infohash\')').next().text(),
      seeders: parseInt(details.find('strong:contains(\'Seeders\')').next().text(), 10),
      leechers: parseInt(details.find('strong:contains(\'Leechers\')').next().text(), 10),
      category: details.find('strong:contains(\'Category\')').next().text(),
      language: details.find('strong:contains(\'Language\')').next().text(),
      size: parseSize(details.find('strong:contains(\'Total size\')').next().text()),
      uploadDate: Sugar.Date.create(details.find('strong:contains(\'Date uploaded\')').next().text()),
      imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`,
      // Each file entry reads like 'path/name (size)'; split it into name, path and size.
      files: details.find('div[id=\'files\']').first().find('li')
          .map((i, elem) => $(elem).text())
          .map((i, text) => ({
            fileIndex: i,
            name: text.match(/^(.+)\s\(.+\)$/)[1].replace(/^.+\//g, ''),
            path: text.match(/^(.+)\s\(.+\)$/)[1],
            size: parseSize(text.match(/^.+\s\((.+)\)$/)[1])
          })).get()
    };
    resolve(torrent);
  });
}
function parseSize(sizeText) {
  if (!sizeText) {
    return undefined;
  }
  // Sizes use binary units, e.g. parseSize('1.4 GB') === Math.floor(1.4 * 1024 ** 3) === 1503238553.
  let scale = 1;
  if (sizeText.includes('GB')) {
    scale = 1024 * 1024 * 1024;
  } else if (sizeText.includes('MB')) {
    scale = 1024 * 1024;
  } else if (sizeText.includes('KB')) {
    scale = 1024;
  }
  return Math.floor(parseFloat(sizeText) * scale);
}
function raceFirstSuccessful(promises) {
  return Promise.all(promises.map((p) => {
    // If a request fails, count that as a resolution so it will keep
    // waiting for other possible successes. If a request succeeds,
    // treat it as a rejection so Promise.all immediately bails out.
    return p.then(
        (val) => Promise.reject(val),
        (err) => Promise.resolve(err)
    );
  })).then(
      // If '.all' resolved, we've just got an array of errors.
      (errors) => Promise.reject(errors),
      // If '.all' rejected, we've got the result we wanted.
      (val) => Promise.resolve(val)
  );
}
module.exports = { torrent, search, browse, Categories };
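
A minimal usage sketch for the module above (editor's illustration, not part of the commit; the search keyword and logging are hypothetical):

const leetx = require('./1337x_api');

// search() resolves with listing rows shaped { name, slug, seeders, leechers, size };
// torrent() then fetches the detail page for the top hit.
leetx.search('big buck bunny')
  .then((results) => leetx.torrent(results[0].slug))
  .then((torrent) => console.log(torrent.infoHash, torrent.name))
  .catch((err) => console.error('all proxies and retries failed', err));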


@@ -0,0 +1,75 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const leetx = require('./1337x_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');

const NAME = '1337x';
const UNTIL_PAGE = 1;

const TYPE_MAPPING = {
  'Movies': Type.MOVIE,
  'Documentaries': Type.MOVIE,
  'TV': Type.SERIES,
  'Anime': Type.ANIME
};

const limiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  const latestTorrents = await getLatestTorrents();
  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return lastScrape.save();
      });
}
async function getLatestTorrents() {
  const movies = await getLatestTorrentsForCategory(leetx.Categories.MOVIE);
  const series = await getLatestTorrentsForCategory(leetx.Categories.TV);
  const anime = await getLatestTorrentsForCategory(leetx.Categories.ANIME);
  const docs = await getLatestTorrentsForCategory(leetx.Categories.DOCUMENTARIES);
  return movies.concat(series).concat(anime).concat(docs);
}

async function getLatestTorrentsForCategory(category, page = 1) {
  return leetx.browse({ category: category, page: page })
      .then(torrents => torrents.length && page < UNTIL_PAGE
          // Recurse into the same category until UNTIL_PAGE is reached.
          ? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
          : torrents)
      .catch(() => []);
}
async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return;
  }

  const torrentFound = await leetx.torrent(record.slug).catch(() => undefined);
  if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: torrentFound.infoHash,
    provider: NAME,
    torrentId: torrentFound.torrentId,
    title: torrentFound.name.replace(/\t|\s+/g, ' '),
    seeders: torrentFound.seeders,
    size: torrentFound.size,
    type: TYPE_MAPPING[torrentFound.category],
    uploadDate: torrentFound.uploadDate,
    imdbId: torrentFound.imdbId,
  };
  return createTorrentEntry(torrent);
}
module.exports = { scrape };
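
A sketch of how the exported scrape() might be driven (editor's assumption; the repo's scheduler/entry point is not part of this commit, and the require path is hypothetical):

const leetxScraper = require('./1337x_scraper');

// One full pass: browse the four categories, funnel each record through
// the 40-slot Bottleneck limiter, then persist the scrape checkpoint.
leetxScraper.scrape()
  .then(() => console.log('1337x scrape finished'))
  .catch((err) => console.error('1337x scrape failed', err));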


@@ -19,7 +19,7 @@ async function scrape() {
   if (!lastScraped.lastScraped) {
     console.log(`${NAME}: no previous scraping exists`);
-    await _scrapeAllShows()
+    //await _scrapeAllShows()
   }
 }


@@ -1,6 +1,7 @@
 const cheerio = require('cheerio');
 const needle = require('needle');
 const moment = require('moment');
+const decode = require('magnet-uri');
 const defaultProxies = [
   'https://thepiratebay.org',
@@ -10,12 +11,7 @@ const defaultProxies = [
 const dumpUrl = '/static/dump/csv/';
 const defaultTimeout = 30000;
-const errors = {
-  REQUEST_ERROR: { code: 'REQUEST_ERROR' },
-  PARSER_ERROR: { code: 'PARSER_ERROR' }
-};
-Categories = {
+const Categories = {
   AUDIO: {
     ALL: 100,
     MUSIC: 101,
@@ -99,7 +95,7 @@ function search(keyword, config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
   const page = config.page || 0;
-  const category = config.cat || 0;
+  const category = config.category || 0;
   return raceFirstSuccessful(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
@@ -107,6 +103,20 @@ function search(keyword, config = {}, retries = 2) {
       .catch((err) => search(keyword, config, retries - 1));
 }
+
+function browse(config = {}, retries = 2) {
+  if (retries === 0) {
+    return Promise.reject(new Error(`Failed browse request`));
+  }
+  const proxyList = config.proxyList || defaultProxies;
+  const page = config.page || 0;
+  const category = config.category || 0;
+  return raceFirstSuccessful(proxyList
+      .map((proxyUrl) => singleRequest(`${proxyUrl}/browse/${category}/${page}`, config)))
+      .then((body) => parseBody(body))
+      .catch((err) => browse(config, retries - 1));
+}
+
 function dumps(config = {}, retries = 2) {
   if (retries === 0) {
     return Promise.reject(new Error(`Failed dump search`));
@@ -150,7 +160,7 @@ function parseBody(body) {
     const $ = cheerio.load(body);
     if (!$) {
-      reject(new Error(errors.PARSER_ERROR));
+      return reject(new Error('Failed loading body'));
     }
     const torrents = [];
@@ -183,7 +193,7 @@ function parseTorrentPage(body) {
     const $ = cheerio.load(body);
     if (!$) {
-      reject(new Error(errors.PARSER_ERROR));
+      return reject(new Error('Failed loading body'));
     }
     const details = $('div[id=\'details\']');
     const col1 = details.find('dl[class=\'col1\']');
@@ -194,6 +204,7 @@ function parseTorrentPage(body) {
       seeders: parseInt(details.find('dt:contains(\'Seeders:\')').next().text(), 10),
       leechers: parseInt(details.find('dt:contains(\'Leechers:\')').next().text(), 10),
       magnetLink: details.find('a[title=\'Get this torrent\']').attr('href'),
+      infoHash: decode(details.find('a[title=\'Get this torrent\']').attr('href')).infoHash,
       category: Categories.VIDEO.ALL,
       subcategory: parseInt(col1.find('a[title=\'More from this category\']').eq(0).attr('href').match(/\d+$/)[0], 10),
       size: parseSize(details.find('dt:contains(\'Size:\')').next().text().match(/(\d+)(?:.?Bytes)/)[1]),
@@ -237,4 +248,4 @@ function raceFirstSuccessful(promises) {
   );
 }
-module.exports = { torrent, search, dumps, Categories };
+module.exports = { torrent, search, browse, dumps, Categories };
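
The newly exported browse() can be exercised like search(); a minimal sketch (editor's illustration; Categories.VIDEO.ALL comes from the Categories map in this file):

const tpb = require('./thepiratebay_api.js');

// Page 0 of the Video > All listing; parseBody yields the same row shape as search().
tpb.browse({ category: tpb.Categories.VIDEO.ALL, page: 0 })
  .then((torrents) => console.log(`got ${torrents.length} rows`))
  .catch((err) => console.error('browse failed on all proxies', err));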


@@ -0,0 +1,74 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const thepiratebay = require('./thepiratebay_api.js');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');

const NAME = 'ThePirateBay';
const UNTIL_PAGE = 1;

const limiter = new Bottleneck({ maxConcurrent: 40 });

const allowedCategories = [
  thepiratebay.Categories.VIDEO.MOVIES,
  thepiratebay.Categories.VIDEO.MOVIES_HD,
  thepiratebay.Categories.VIDEO.MOVIES_DVDR,
  thepiratebay.Categories.VIDEO.MOVIES_3D,
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
const seriesCategories = [
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  const latestTorrents = await getLatestTorrents();
  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return lastScrape.save();
      });
}
async function getLatestTorrents(page = 0) {
  return thepiratebay.browse({ category: thepiratebay.Categories.VIDEO.ALL, page: page })
      .then(torrents => torrents.length && page < UNTIL_PAGE
          ? getLatestTorrents(page + 1).then(nextTorrents => torrents.concat(nextTorrents))
          : torrents)
      .catch(() => []);
}
async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return;
  }

  const torrentFound = await thepiratebay.torrent(record.torrentId).catch(() => undefined);
  if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: torrentFound.infoHash,
    provider: NAME,
    torrentId: torrentFound.torrentId,
    title: torrentFound.name.replace(/\t|\s+/g, ' '),
    size: torrentFound.size,
    type: seriesCategories.includes(torrentFound.subcategory) ? Type.SERIES : Type.MOVIE,
    imdbId: torrentFound.imdbId,
    uploadDate: torrentFound.uploadDate,
    seeders: torrentFound.seeders,
  };
  return createTorrentEntry(torrent);
}
module.exports = { scrape };
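
Both scraper modules expose the same scrape() contract, so a top-level job could run them back to back (editor's sketch; the module paths are assumed from the api file locations and may differ in the repo):

const leetx = require('./scrapers/1337x/1337x_scraper');
const tpb = require('./scrapers/thepiratebay/thepiratebay_scraper');

// Run providers sequentially so the two 40-slot Bottleneck limiters don't stack.
leetx.scrape()
  .then(() => tpb.scrape())
  .catch((err) => console.error('scrape run failed', err));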