moves scraper to a subpackage

TheBeastLT
2020-03-10 15:12:18 +01:00
parent dff66d8fb2
commit 58aba322c2
30 changed files with 50 additions and 28 deletions


@@ -0,0 +1,174 @@
const cheerio = require('cheerio');
const needle = require('needle');
const Sugar = require('sugar-date');
const decode = require('magnet-uri');
const defaultProxies = [
'https://1337x.to'
];
const defaultTimeout = 10000;
const Categories = {
MOVIE: 'Movies',
TV: 'TV',
ANIME: 'Anime',
DOCUMENTARIES: 'Documentaries',
APPS: 'Apps',
GAMES: 'Games',
MUSIC: 'Music',
PORN: 'XXX',
OTHER: 'Other',
};
function torrent(torrentId, config = {}, retries = 2) {
if (!torrentId || retries === 0) {
return Promise.reject(new Error(`Failed ${torrentId} query`));
}
const proxyList = config.proxyList || defaultProxies;
const slug = torrentId.startsWith('/torrent/') ? torrentId.replace('/torrent/', '') : torrentId;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
.then((body) => parseTorrentPage(body))
.then((torrent) => ({ torrentId: slug, ...torrent }))
.catch((err) => torrent(slug, config, retries - 1));
}
function search(keyword, config = {}, retries = 2) {
if (!keyword || retries === 0) {
return Promise.reject(new Error(`Failed ${keyword} search`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 1;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/`, config)))
.then((body) => parseTableBody(body))
.catch((err) => search(keyword, config, retries - 1));
}
function browse(config = {}, retries = 2) {
if (retries === 0) {
return Promise.reject(new Error(`Failed browse request`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 1;
const category = config.category;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/cat/${category}/${page}/`, config)))
.then((body) => parseTableBody(body))
.catch((err) => browse(config, retries - 1));
}
function singleRequest(requestUrl, config = {}) {
const timeout = config.timeout || defaultTimeout;
return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
.then((response) => {
const body = response.body;
if (!body) {
throw new Error(`No body: ${requestUrl}`);
} else if (body.includes('502: Bad gateway') ||
body.includes('403 Forbidden') ||
!(body.includes('1337x</title>'))) {
throw new Error(`Invalid body contents: ${requestUrl}`);
}
return body;
});
}
function parseTableBody(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const torrents = [];
$('.table > tbody > tr').each((i, element) => {
const row = $(element);
torrents.push({
name: row.find('a').eq(1).text(),
torrentId: row.find('a').eq(1).attr('href').replace('/torrent/', ''),
seeders: parseInt(row.children('td.coll-2').text()),
leechers: parseInt(row.children('td.coll-3').text()),
size: parseSize(row.children('td.coll-4').text())
});
});
resolve(torrents);
});
}
function parseTorrentPage(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const details = $('.torrent-detail-page');
const magnetLink = details.find('a:contains(\'Magnet Download\')').attr('href');
const imdbIdMatch = details.find('div[id=\'description\']').html().match(/imdb\.com\/title\/tt(\d+)/i);
const torrent = {
name: decode(magnetLink).name.replace(/\+/g, ' '),
infoHash: decode(magnetLink).infoHash,
magnetLink: magnetLink,
seeders: parseInt(details.find('strong:contains(\'Seeders\')').next().text(), 10),
leechers: parseInt(details.find('strong:contains(\'Leechers\')').next().text(), 10),
category: details.find('strong:contains(\'Category\')').next().text(),
language: details.find('strong:contains(\'Language\')').next().text(),
size: parseSize(details.find('strong:contains(\'Total size\')').next().text()),
uploadDate: Sugar.Date.create(details.find('strong:contains(\'Date uploaded\')').next().text()),
imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`,
files: details.find('div[id=\'files\']').first().find('li')
.map((i, elem) => $(elem).text())
.map((i, text) => ({
fileIndex: i,
name: text.match(/^(.+)\s\(.+\)$/)[1].replace(/^.+\//g, ''),
path: text.match(/^(.+)\s\(.+\)$/)[1],
size: parseSize(text.match(/^.+\s\((.+)\)$/)[1])
})).get()
};
resolve(torrent);
});
}
function parseSize(sizeText) {
if (!sizeText) {
return undefined;
}
let scale = 1;
if (sizeText.includes('GB')) {
scale = 1024 * 1024 * 1024;
} else if (sizeText.includes('MB')) {
scale = 1024 * 1024;
} else if (sizeText.includes('KB')) {
scale = 1024;
}
return Math.floor(parseFloat(sizeText) * scale);
}
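// Quick sanity check for parseSize (illustrative only, not part of the module):
// parseSize('1.4 GB')  === Math.floor(1.4 * 1024 * 1024 * 1024)  // 1503238553
// parseSize('700 MB')  === Math.floor(700 * 1024 * 1024)         // 734003200
// parseSize(undefined) === undefined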
function raceFirstSuccessful(promises) {
return Promise.all(promises.map((p) => {
// If a request fails, count that as a resolution so it will keep
// waiting for other possible successes. If a request succeeds,
// treat it as a rejection so Promise.all immediately bails out.
return p.then(
(val) => Promise.reject(val),
(err) => Promise.resolve(err)
);
})).then(
// If '.all' resolved, we've just got an array of errors.
(errors) => Promise.reject(errors),
// If '.all' rejected, we've got the result we wanted.
(val) => Promise.resolve(val)
);
}
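// Illustrative usage of the inversion trick above (mirror URLs are assumed):
// the first request to fulfil wins the race, while rejections are collected
// and only surface if every mirror fails.
// raceFirstSuccessful([
//   singleRequest('https://1337x.to/torrent/123/slug/', {}),
//   singleRequest('https://mirror.example/torrent/123/slug/', {})
// ]).then((body) => parseTorrentPage(body))
//   .catch((errors) => console.log(`all mirrors failed: ${errors.length}`));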
module.exports = { torrent, search, browse, Categories };


@@ -0,0 +1,89 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const leetx = require('./1337x_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
createTorrentEntry,
createSkipTorrentEntry,
getStoredTorrentEntry,
updateTorrentSeeders
} = require('../../lib/torrentEntries');
const NAME = '1337x';
const UNTIL_PAGE = 1;
const TYPE_MAPPING = typeMapping();
const limiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
const scrapeStart = moment();
const lastScrape = await repository.getProvider({ name: NAME });
console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
const latestTorrents = await getLatestTorrents();
return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
.then(() => {
lastScrape.lastScraped = scrapeStart;
lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
return repository.updateProvider(lastScrape);
})
.then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}
async function getLatestTorrents() {
const allowedCategories = [
leetx.Categories.MOVIE,
leetx.Categories.TV,
leetx.Categories.ANIME,
leetx.Categories.DOCUMENTARIES
];
return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
.then(entries => entries.reduce((a, b) => a.concat(b), []));
}
async function getLatestTorrentsForCategory(category, page = 1) {
return leetx.browse({ category, page })
.then(torrents => torrents.length && page < UNTIL_PAGE
? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
: torrents)
.catch(() => []);
}
async function processTorrentRecord(record) {
if (await getStoredTorrentEntry(record)) {
return updateTorrentSeeders(record);
}
const torrentFound = await leetx.torrent(record.torrentId).catch(() => undefined);
if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
return createSkipTorrentEntry(record);
}
const torrent = {
infoHash: torrentFound.infoHash,
provider: NAME,
torrentId: torrentFound.torrentId,
title: torrentFound.name.replace(/\t|\s+/g, ' '),
seeders: torrentFound.seeders,
size: torrentFound.size,
type: TYPE_MAPPING[torrentFound.category],
uploadDate: torrentFound.uploadDate,
imdbId: torrentFound.imdbId,
};
return createTorrentEntry(torrent);
}
function typeMapping() {
const mapping = {};
mapping[leetx.Categories.MOVIE] = Type.MOVIE;
mapping[leetx.Categories.DOCUMENTARIES] = Type.MOVIE;
mapping[leetx.Categories.TV] = Type.SERIES;
mapping[leetx.Categories.ANIME] = Type.ANIME;
return mapping;
}
module.exports = { scrape };


@@ -0,0 +1,136 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
const defaultUrl = 'https://horriblesubs.info';
const defaultTimeout = 10000;
function allShows(config = {}) {
return _getContent('/shows', config)
.then(($) => $('div[class="ind-show"]')
.map((index, element) => $(element).children('a'))
.map((index, element) => ({
title: element.attr('title'),
url: `${config.proxyUrl || defaultUrl}${element.attr('href')}`
})).get());
}
async function showData(showInfo, config = {}) {
const showEndpoint = (showInfo.url || showInfo).match(/\/show.+/)[0];
const title = showInfo.title;
const showId = await _getShowId(showEndpoint);
const packEntries = await _getShowEntries(showId, title, 'batch', config);
const singleEntries = await _getShowEntries(showId, title, 'show', config);
return {
title: title,
url: showInfo.url || showInfo,
showId: showId,
singleEpisodes: singleEntries,
packEpisodes: packEntries
};
}
async function getLatestEntries(config = {}) {
return _getAllLatestEntries(config)
.then((entries) => Promise.all(entries.map((entry) => _findLatestEntry(entry, config))))
.then((entries) => entries.filter((entry) => entry));
}
function _getContent(endpoint, config = {}) {
const baseUrl = config.proxyUrl || defaultUrl;
const timeout = config.timeout || defaultTimeout;
const url = endpoint.startsWith('http')
? endpoint.replace(/https?:\/\/[^/]+/, baseUrl)
: `${baseUrl}${endpoint}`;
return needle('get', url, { open_timeout: timeout, follow: 2 })
.then((response) => response.body)
.then((body) => cheerio.load(body));
}
function _getShowId(showEndpoint) {
return _getContent(showEndpoint)
.then($ => $('div.entry-content').find('script').html().match(/var hs_showid = (\d+)/)[1]);
}
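// Note: the show page embeds a line like `var hs_showid = 347;` inside a
// script tag (347 is just an example value); that numeric id is what the
// /api.php?method=getshows endpoints below key on.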
function _getShowEntries(animeId, animeTitle, type, config) {
return _getAllEntries(animeId, type, config)
.then((entries) => entries.filter((entry) => entry.title === animeTitle));
}
function _getAllEntries(animeId, type, config, page = 0, autoExtend = true) {
const entriesEndpoint = `/api.php?method=getshows&type=${type}&showid=${animeId}&nextid=${page}`;
return _getEntries(entriesEndpoint, config)
.then((entries) => !autoExtend || !entries.length ? entries :
_getAllEntries(animeId, type, config, page + 1)
.then((nextEntries) => entries.concat(nextEntries)));
}
function _getEntries(endpoint, config) {
return _getContent(endpoint, config)
.then(($) => $('div[class="rls-info-container"]')
.map((index, element) => ({
title: $(element).find('a[class="rls-label"]').contents()
.filter((i, el) => el.nodeType === 3).first().text().trim(),
episode: $(element).find('a[class="rls-label"]').find('strong').text(),
uploadDate: _parseDate($(element).find('a[class="rls-label"]').find('span[class="rls-date"]').text()),
mirrors: $(element).find('div[class="rls-links-container"]').children()
.map((indexLink, elementLink) => ({
resolution: $(elementLink).attr('id').match(/\d+p$/)[0],
magnetLink: $(elementLink).find('a[title="Magnet Link"]').attr('href'),
torrentLink: $(elementLink).find('a[title="Torrent Link"]').attr('href')
})).get()
})).get());
}
function _getAllLatestEntries(config, page = 0) {
const pageParam = page === 0 ? '' : `&nextid=${page}`;
const entriesEndpoint = `/api.php?method=getlatest${pageParam}`;
return _getContent(entriesEndpoint, config)
.then(($) => $('li a')
.map((index, element) => ({
urlEndpoint: $(element).attr('href'),
episode: $(element).find('strong').text()
})).get())
.then((entries) => entries.length < 12
? entries
: _getAllLatestEntries(config, page + 1)
.then((nextEntries) => entries.concat(nextEntries)));
}
async function _findLatestEntry(entry, config) {
const showId = await _getShowId(entry.urlEndpoint);
let foundEntry;
let page = 0;
let reachedEnd = false;
while (!foundEntry && !reachedEnd) {
const allEntries = await _getAllEntries(showId, 'show', config, page, false);
foundEntry = allEntries.filter((e) => e.episode === entry.episode)[0];
page = page + 1;
reachedEnd = allEntries.length === 0;
}
if (!foundEntry) {
return;
}
return {
title: foundEntry.title,
url: entry.urlEndpoint,
showId: showId,
singleEpisodes: [foundEntry]
};
}
function _parseDate(date) {
if (date.match(/today/i)) {
return moment().toDate();
} else if (date.match(/yesterday/i)) {
return moment().subtract(1, 'day').toDate();
}
return moment(date, 'MM/DD/YYYY').toDate();
}
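// For reference (illustrative): _parseDate('Today') and _parseDate('Yesterday')
// resolve relative to now; anything else is parsed as MM/DD/YYYY, e.g.
// _parseDate('03/10/2020') -> Date for March 10th 2020.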
module.exports = { allShows, showData, getLatestEntries, _getShowId };

File diff suppressed because it is too large.


@@ -0,0 +1,189 @@
const fs = require('fs');
const moment = require('moment');
const Bottleneck = require('bottleneck');
const decode = require('magnet-uri');
const horriblesubs = require('./horriblesubs_api.js');
const repository = require('../../lib/repository');
const { Type } = require('../../lib/types');
const { updateCurrentSeeders } = require('../../lib/torrent');
const { parseTorrentFiles } = require('../../lib/torrentFiles');
const { getMetadata, getKitsuId } = require('../../lib/metadata');
const showMappings = require('./horriblesubs_mapping.json');
const NAME = 'HorribleSubs';
const NEXT_FULL_SCRAPE_OFFSET = 3 * 24 * 60 * 60; // 3 days
const limiter = new Bottleneck({ maxConcurrent: 5 });
const entryLimiter = new Bottleneck({ maxConcurrent: 10 });
async function scrape() {
const scrapeStart = moment();
const lastScrape = await repository.getProvider({ name: NAME });
const lastScraped = lastScrape.lastScraped && moment.unix(lastScrape.lastScraped);
if (!lastScraped || lastScraped.add(NEXT_FULL_SCRAPE_OFFSET, 'seconds') < scrapeStart) {
console.log(`[${scrapeStart}] scraping all ${NAME} shows...`);
return _scrapeAllShows()
.then(() => {
lastScrape.lastScraped = scrapeStart;
return repository.updateProvider(lastScrape);
})
.then(() => console.log(`[${moment()}] finished scraping all ${NAME} shows`));
} else {
console.log(`[${scrapeStart}] scraping latest ${NAME} entries...`);
return _scrapeLatestEntries()
.then(() => console.log(`[${moment()}] finished scraping latest ${NAME} entries`));
}
}
async function _scrapeLatestEntries() {
const latestEntries = await horriblesubs.getLatestEntries();
return Promise.all(latestEntries
.map((entryData) => limiter.schedule(() => _parseShowData(entryData)
.catch((err) => console.log(err)))));
}
async function _scrapeAllShows() {
const shows = await horriblesubs.allShows();
return Promise.all(shows
.map((show) => limiter.schedule(() => horriblesubs.showData(show)
.then((showData) => _parseShowData(showData))
.catch((err) => console.log(err)))));
}
async function compareSearchKitsuIds() {
console.log(`${NAME}: initiating kitsu compare...`);
const shows = await horriblesubs.allShows()
.then((shows) => Promise.all(shows.slice(0, 1).map((show) => limiter.schedule(() => enrichShow(show)))));
const incorrect = shows.filter(
(show) => showMappings[show.title] && showMappings[show.title].kitsu_id !== show.kitsu_id);
const incorrectRatio = incorrect.length / shows.length;
console.log(incorrect);
console.log(`Ratio: ${incorrectRatio}`);
}
async function initMapping() {
console.log(`${NAME}: initiating kitsu mapping...`);
const shows = await horriblesubs.allShows()
.then((shows) => shows.filter((show) => !showMappings[show.title]))
.then((shows) => Promise.all(shows.map((show) => limiter.schedule(() => enrichShow(show)))))
.then((shows) => shows.reduce((map, show) => (map[show.title] = show, map), showMappings));
fs.writeFile("./scrapers/horriblesubs/horriblesubs_mapping.json", JSON.stringify(shows), 'utf8', function (err) {
if (err) {
console.log("An error occurred while writing JSON Object to File.");
} else {
console.log(`${NAME}: finished kitsu mapping`);
}
});
}
async function enrichShow(show) {
console.log(`${NAME}: getting show info for ${show.title}...`);
const showId = await horriblesubs._getShowId(show.url)
.catch(() => show.title);
const metadata = await getKitsuId({ title: show.title })
.then((kitsuId) => getMetadata(kitsuId))
.catch((error) => {
console.log(`Failed getting kitsu meta: ${error.message}`);
return {};
});
return {
showId: showId,
kitsu_id: metadata.kitsuId,
...show,
kitsuTitle: metadata.title,
imdb_id: metadata.imdbId
}
}
async function _parseShowData(showData) {
console.log(`${NAME}: scraping ${showData.title} data...`);
const showMapping = showMappings[showData.title];
const kitsuId = showMapping && showMapping.kitsu_id;
if (!showMapping) {
throw new Error(`No kitsu mapping found for ${showData.title}`);
}
if (!kitsuId) {
throw new Error(`No kitsuId found for ${showData.title}`);
}
// sometimes a horriblesubs entry contains multiple seasons, so it needs to be split per kitsu season entry
const kitsuIdsMapping = Array.isArray(kitsuId) && await Promise.all(kitsuId.map(kitsuId => getMetadata(kitsuId)))
.then((metas) => metas.reduce((map, meta) => {
const epOffset = Object.keys(map).length;
[...Array(meta.totalCount).keys()]
.map(ep => ep + 1)
.forEach(ep => map[ep + epOffset] = { kitsuId: meta.kitsuId, episode: ep, title: meta.title });
return map;
}, {})) || {};
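// Worked example under assumed metadata: if kitsuId is [100, 200] and the two
// kitsu entries have 12 and 13 episodes respectively, absolute episodes map as
// { '1': { kitsuId: 100, episode: 1 }, ..., '12': { kitsuId: 100, episode: 12 },
//   '13': { kitsuId: 200, episode: 1 }, ..., '25': { kitsuId: 200, episode: 13 } }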
const formatTitle = (episodeInfo, mirror) => {
const mapping = kitsuIdsMapping[episodeInfo.episode.replace(/^0+/, '')];
if (mapping) {
return `${mapping.title} - ${mapping.episode} [${mirror.resolution}]`;
}
return `${episodeInfo.title} - ${episodeInfo.episode} [${mirror.resolution}]`;
};
const getKitsuId = inputEpisode => {
const episodeString = inputEpisode.includes('-') && inputEpisode.split('-')[0] || inputEpisode;
const episode = parseInt(episodeString, 10);
return kitsuIdsMapping[episode] && kitsuIdsMapping[episode].kitsuId || kitsuId;
};
return Promise.all([].concat(showData.singleEpisodes).concat(showData.packEpisodes)
.map((episodeInfo) => episodeInfo.mirrors
.map((mirror) => ({
provider: NAME,
...mirror,
infoHash: decode(mirror.magnetLink).infoHash,
trackers: decode(mirror.magnetLink).tr.join(','),
title: formatTitle(episodeInfo, mirror),
size: 300000000, // placeholder; the mirror listing exposes no size info
type: Type.ANIME,
kitsuId: getKitsuId(episodeInfo.episode),
uploadDate: episodeInfo.uploadDate,
})))
.reduce((a, b) => a.concat(b), [])
.map((incompleteTorrent) => entryLimiter.schedule(() => checkIfExists(incompleteTorrent)
.then((torrent) => torrent && updateCurrentSeeders(torrent))
.then((torrent) => torrent && parseTorrentFiles(torrent)
.then((files) => verifyFiles(torrent, files))
.then((files) => repository.createTorrent(torrent)
.then(() => files.forEach(file => repository.createFile(file)))
.then(() => console.log(`Created entry for ${torrent.title}`)))))))
.then(() => console.log(`${NAME}: finished scraping ${showData.title} data`));
}
async function verifyFiles(torrent, files) {
if (files && files.length) {
const existingFiles = await repository.getFiles({ infoHash: files[0].infoHash })
.then((existing) => existing.reduce((map, file) => (map[file.fileIndex] = file, map), {}))
.catch(() => undefined);
if (existingFiles && Object.keys(existingFiles).length) {
return files
.map(file => ({
...file,
id: existingFiles[file.fileIndex] && existingFiles[file.fileIndex].id,
size: existingFiles[file.fileIndex] && existingFiles[file.fileIndex].size || file.size
}))
}
return files;
}
return Promise.reject(new Error(`No video files found for: ${torrent.title}`));
}
async function checkIfExists(torrent) {
const existingTorrent = await repository.getTorrent(torrent).catch(() => undefined);
if (!existingTorrent) {
return torrent; // no torrent exists yet
} else if (existingTorrent.provider === NAME) {
return undefined; // torrent by this provider already exists
}
return { ...torrent, size: existingTorrent.size, seeders: existingTorrent.seeders };
}
module.exports = { scrape };


@@ -0,0 +1,187 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
const decode = require('magnet-uri');
const defaultProxies = [
'https://katcr.co'
];
const defaultTimeout = 10000;
const Categories = {
MOVIE: 'movies',
TV: 'tv',
ANIME: 'anime',
APPS: 'applications',
GAMES: 'games',
MUSIC: 'music',
BOOKS: 'books',
PORN: 'xxx',
OTHER: 'other',
};
function torrent(torrentId, config = {}, retries = 2) {
if (!torrentId || retries === 0) {
return Promise.reject(new Error(`Failed ${torrentId} search`));
}
const proxyList = config.proxyList || defaultProxies;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
.then((body) => parseTorrentPage(body))
.then((torrent) => ({ torrentId, ...torrent }))
.catch((err) => torrent(torrentId, config, retries - 1));
}
function search(keyword, config = {}, retries = 2) {
if (!keyword || retries === 0) {
return Promise.reject(new Error(`Failed ${keyword} search`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 1;
const category = config.category;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
.then((body) => parseTableBody(body))
.catch((err) => search(keyword, config, retries - 1));
}
function browse(config = {}, retries = 2) {
if (retries === 0) {
return Promise.reject(new Error(`Failed browse request`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 1;
const category = config.category;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/category/${category}/page/${page}`, config)))
.then((body) => parseTableBody(body))
.catch((err) => browse(config, retries - 1));
}
function singleRequest(requestUrl, config = {}) {
const timeout = config.timeout || defaultTimeout;
return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
.then((response) => {
const body = response.body;
if (!body) {
throw new Error(`No body: ${requestUrl}`);
} else if (body.includes('Access Denied')) {
console.log(`Access Denied: ${requestUrl}`);
throw new Error(`Access Denied: ${requestUrl}`);
} else if (body.includes('502: Bad gateway') ||
body.includes('403 Forbidden') ||
body.includes('Origin DNS error') ||
!body.includes('Kickass Torrents</title>')) {
throw new Error(`Invalid body contents: ${requestUrl}`);
}
return body;
});
}
function parseTableBody(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const torrents = [];
$('.table > tbody > tr').each((i, element) => {
const row = $(element);
torrents.push({
torrentId: row.find('a[class="torrents_table__torrent_title"]').first().attr('href').replace('/torrent/', ''),
name: row.find('a[class="torrents_table__torrent_title"]').first().children('b').text(),
category: row.find('span[class="torrents_table__upload_info"]').first().children('a').first().attr('href')
.match(/category\/([^\/]+)/)[1],
seeders: parseInt(row.find('td[data-title="Seed"]').first().text()),
leechers: parseInt(row.find('td[data-title="Leech"]').first().text()),
size: parseSize(row.find('td[data-title="Size"]').first().text()),
uploadDate: moment(row.find('td[data-title="Age"]').first().attr('title')).toDate()
});
});
resolve(torrents);
});
}
function parseTorrentPage(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const content = $('div[class="col"]').first();
const info = content.find('div[class="torrent_stats"]').parent();
const description = content.find('div[id="main"]');
const magnetLink = info.find('a[title="Download verified Magnet"]').attr('href');
const imdbIdMatch = description.html().match(/imdb\.com\/title\/tt(\d+)/i);
const torrent = {
name: info.find('h1').first().text(),
infoHash: decode(magnetLink).infoHash,
magnetLink: magnetLink,
seeders: parseInt(info.find('span[class="torrent_stats__seed_count mr-2"]').first().text().match(/\d+/)[0], 10),
leechers: parseInt(info.find('span[class="torrent_stats__leech_count mr-2"]').first().text().match(/\d+/)[0], 10),
category: info.find('small').first().children('a').first().attr('href').match(/\/category\/([^\/]+)/)[1],
language: description.find('span:contains(\'Audio\')').next().children().eq(0).text(),
size: parseSize(description.find('ul[class="file_list"]').first().find('li').first().contents().eq(2).text()
.match(/\(Size: (.+)\)/)[1]),
uploadDate: moment(info.find('time').first().text()).toDate(),
imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`,
files: content.find('ul[class="file_list"]').first().find('li > ul > li[class="file_list__file"]')
.map((i, elem) => $(elem))
.map((i, ele) => ({
fileIndex: i,
name: ele.find('span > ul > li').contents().eq(1).text().trim().replace(/^.+\//g, ''),
path: ele.find('span > ul > li').contents().eq(1).text().trim(),
size: parseSize(ele.contents().eq(2).text())
})).get()
};
if (torrent.files.length >= 50) {
// a max of 50 files are displayed on the page
delete torrent.files;
}
resolve(torrent);
});
}
function parseSize(sizeText) {
if (!sizeText) {
return undefined;
}
let scale = 1;
if (sizeText.includes('GB')) {
scale = 1024 * 1024 * 1024;
} else if (sizeText.includes('MB')) {
scale = 1024 * 1024;
} else if (sizeText.includes('KB') || sizeText.includes('kB')) {
scale = 1024;
}
return Math.floor(parseFloat(sizeText.replace(/[',]/g, '')) * scale);
}
function raceFirstSuccessful(promises) {
return Promise.all(promises.map((p) => {
// If a request fails, count that as a resolution so it will keep
// waiting for other possible successes. If a request succeeds,
// treat it as a rejection so Promise.all immediately bails out.
return p.then(
(val) => Promise.reject(val),
(err) => Promise.resolve(err)
);
})).then(
// If '.all' resolved, we've just got an array of errors.
(errors) => Promise.reject(errors),
// If '.all' rejected, we've got the result we wanted.
(val) => Promise.resolve(val)
);
}
module.exports = { torrent, search, browse, Categories };


@@ -0,0 +1,98 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const LineByLineReader = require('line-by-line');
const fs = require('fs');
const { Type } = require('../../lib/types');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');
const NAME = 'KickassTorrents';
const CSV_FILE_PATH = '/tmp/kickass.csv';
const limiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
console.log(`starting to scrape KAT dump: ${CSV_FILE_PATH}`);
let entriesProcessed = 0;
const lr = new LineByLineReader(CSV_FILE_PATH);
lr.on('line', (line) => {
if (entriesProcessed % 1000 === 0) {
console.log(`Processed ${entriesProcessed} entries`);
}
const row = line.match(/(?<=^|\|)(".*"|[^|]+)(?=\||$)/g);
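// e.g. a pipe-delimited dump line (hypothetical values):
// 'ABCDEF0123...|Some Title|Movies|...|734003200|...|12|...|1583848338'
// splits into the 11 fields consumed below; quoted fields keep embedded '|'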
if (row.length !== 11) {
console.log(`Invalid row: ${line}`);
return;
}
const torrent = {
infoHash: row[0].toLowerCase(),
title: row[1]
.replace(/^"|"$/g, '')
.replace(/&amp;/g, '&')
.replace(/&\w{2,6};/g, ' ')
.replace(/\s+/g, ' ')
.trim(),
category: row[2],
size: parseInt(row[5], 10),
seeders: parseInt(row[8], 10),
uploadDate: moment.unix(parseInt(row[10], 10)).toDate(),
};
if (!limiter.empty()) {
lr.pause()
}
limiter.schedule(() => processTorrentRecord(torrent)
.catch((error) => console.log(`failed ${torrent.title} due: ${error}`)))
.then(() => limiter.empty())
.then((empty) => empty && lr.resume())
.then(() => entriesProcessed++);
});
lr.on('error', (err) => {
console.log(err);
});
lr.on('end', () => {
fs.unlink(CSV_FILE_PATH, (err) => err && console.warn(err));
console.log(`finished scraping KAT dump: ${CSV_FILE_PATH}`);
});
}
const categoryMapping = {
"Movies": Type.MOVIE,
"TV": Type.SERIES,
"Anime": Type.ANIME
};
async function processTorrentRecord(record) {
if (!categoryMapping[record.category] || record.seeders === 0) {
return createSkipTorrentEntry(record);
}
if (await getStoredTorrentEntry(record)) {
return;
}
const torrentFound = await findTorrent(record).catch(() => undefined);
if (!torrentFound) {
return createSkipTorrentEntry(record);
}
const torrent = {
infoHash: record.infoHash,
provider: NAME,
title: torrentFound.name,
size: record.size,
type: categoryMapping[record.category],
imdbId: torrentFound.imdbId,
uploadDate: record.uploadDate,
seeders: torrentFound.seeders,
};
return createTorrentEntry(torrent);
}
async function findTorrent(record) {
// stub: no lookup source is wired up for the KAT dump yet, so every record
// falls through to createSkipTorrentEntry above
return Promise.reject(new Error('not found'));
}
module.exports = { scrape };


@@ -0,0 +1,87 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const kickass = require('./kickass_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
createTorrentEntry,
createSkipTorrentEntry,
getStoredTorrentEntry,
updateTorrentSeeders
} = require('../../lib/torrentEntries');
const NAME = 'KickassTorrents';
const UNTIL_PAGE = 1;
const TYPE_MAPPING = typeMapping();
const limiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
const scrapeStart = moment();
const lastScrape = await repository.getProvider({ name: NAME });
console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
const latestTorrents = await getLatestTorrents();
return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
.then(() => {
lastScrape.lastScraped = scrapeStart;
lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
return repository.updateProvider(lastScrape);
})
.then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}
async function getLatestTorrents() {
const allowedCategories = [
kickass.Categories.MOVIE,
kickass.Categories.TV,
kickass.Categories.ANIME,
];
return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
.then(entries => entries.reduce((a, b) => a.concat(b), []));
}
async function getLatestTorrentsForCategory(category, page = 1) {
return kickass.browse({ category, page })
.then(torrents => torrents.length && page < UNTIL_PAGE
? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
: torrents)
.catch(() => []);
}
async function processTorrentRecord(record) {
if (await getStoredTorrentEntry(record)) {
return updateTorrentSeeders(record);
}
const torrentFound = await kickass.torrent(record.torrentId).catch(() => undefined);
if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
return createSkipTorrentEntry(record);
}
const torrent = {
infoHash: torrentFound.infoHash,
provider: NAME,
torrentId: torrentFound.torrentId,
title: torrentFound.name.replace(/\t|\s+/g, ' '),
size: torrentFound.size,
type: TYPE_MAPPING[torrentFound.category],
imdbId: torrentFound.imdbId,
uploadDate: torrentFound.uploadDate,
seeders: torrentFound.seeders,
};
return createTorrentEntry(torrent);
}
function typeMapping() {
const mapping = {};
mapping[kickass.Categories.MOVIE] = Type.MOVIE;
mapping[kickass.Categories.TV] = Type.SERIES;
mapping[kickass.Categories.ANIME] = Type.ANIME;
return mapping;
}
module.exports = { scrape };


@@ -0,0 +1,76 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const rarbg = require('rarbg-api');
const decode = require('magnet-uri');
const { Type } = require('../../lib/types');
const {
createTorrentEntry,
getStoredTorrentEntry,
updateTorrentSeeders
} = require('../../lib/torrentEntries');
const NAME = 'RARBG';
const limiter = new Bottleneck({ maxConcurrent: 1, minTime: 2500 });
const entryLimiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
console.log(`[${moment()}] starting ${NAME} dump scrape...`);
const movieImdbIds = require('./rargb_movie_imdb_ids_2020-03-09.json');
const seriesImdbIds = require('./rargb_series_imdb_ids_2020-03-09.json');
const allImdbIds = [].concat(movieImdbIds).concat(seriesImdbIds);
return Promise.all(allImdbIds.map(imdbId => limiter.schedule(() => getTorrentsForImdbId(imdbId))
.then(torrents => Promise.all(torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t)))))))
.then(() => console.log(`[${moment()}] finished ${NAME} dump scrape`));
}
async function getTorrentsForImdbId(imdbId) {
return rarbg.search(imdbId, { limit: 100, sort: 'seeders', format: 'json_extended', ranked: 0 }, 'imdb')
.then(torrents => torrents.map(torrent => ({
name: torrent.title,
infoHash: decode(torrent.download).infoHash,
magnetLink: torrent.download,
seeders: torrent.seeders,
leechers: torrent.leechers,
category: torrent.category,
size: torrent.size,
uploadDate: new Date(torrent.pubdate),
imdbId: torrent.episode_info && torrent.episode_info.imdb
})))
.catch((err) => []);
}
async function processTorrentRecord(record) {
if (await getStoredTorrentEntry(record)) {
return updateTorrentSeeders(record);
}
const torrent = {
provider: NAME,
infoHash: record.infoHash,
title: record.name,
type: getType(record.category),
seeders: record.seeders,
size: record.size,
uploadDate: record.uploadDate,
imdbId: record.imdbId
};
return createTorrentEntry(torrent);
}
const seriesCategories = [
'TV Episodes',
'Movies/TV-UHD-episodes',
'TV HD Episodes',
];
function getType(category) {
if (seriesCategories.includes(category)) {
return Type.SERIES;
}
return Type.MOVIE;
}
module.exports = { scrape };


@@ -0,0 +1,103 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const rarbg = require('rarbg-api');
const decode = require('magnet-uri');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
createTorrentEntry,
getStoredTorrentEntry,
updateTorrentSeeders
} = require('../../lib/torrentEntries');
const NAME = 'RARBG';
const limiter = new Bottleneck({ maxConcurrent: 1, minTime: 2500 });
const entryLimiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
const scrapeStart = moment();
const lastScrape = await repository.getProvider({ name: NAME });
console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
const latestTorrents = await getLatestTorrents();
return Promise.all(latestTorrents.map(torrent => entryLimiter.schedule(() => processTorrentRecord(torrent))))
.then(() => {
lastScrape.lastScraped = scrapeStart;
lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
return repository.updateProvider(lastScrape);
})
.then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}
async function getLatestTorrents() {
const allowedCategories = [
rarbg.CATEGORY['4K_MOVIES_X264_4k'],
rarbg.CATEGORY['4K_X265_4k'],
rarbg.CATEGORY['4k_X264_4k_HDR'],
rarbg.CATEGORY.MOVIES_XVID,
rarbg.CATEGORY.MOVIES_XVID_720P,
rarbg.CATEGORY.MOVIES_X264,
rarbg.CATEGORY.MOVIES_X264_1080P,
rarbg.CATEGORY.MOVIES_X264_720P,
rarbg.CATEGORY.MOVIES_X264_3D,
rarbg.CATEGORY.MOVIES_FULL_BD,
rarbg.CATEGORY.MOVIES_BD_REMUX,
rarbg.CATEGORY.TV_EPISODES,
rarbg.CATEGORY.TV_UHD_EPISODES,
rarbg.CATEGORY.TV_HD_EPISODES
];
return Promise.all(allowedCategories.map(category => limiter.schedule(() => getLatestTorrentsForCategory(category))))
.then(entries => entries.reduce((a, b) => a.concat(b), []));
}
async function getLatestTorrentsForCategory(category) {
return rarbg.list({ category: category, limit: 100, sort: 'last', format: 'json_extended', ranked: 0 })
.then(torrents => torrents.map(torrent => ({
name: torrent.title,
infoHash: decode(torrent.download).infoHash,
magnetLink: torrent.download,
seeders: torrent.seeders,
leechers: torrent.leechers,
category: torrent.category,
size: torrent.size,
uploadDate: new Date(torrent.pubdate),
imdbId: torrent.episode_info && torrent.episode_info.imdb
})))
.catch((err) => []);
}
async function processTorrentRecord(record) {
if (await getStoredTorrentEntry(record)) {
return updateTorrentSeeders(record);
}
const torrent = {
provider: NAME,
infoHash: record.infoHash,
title: record.name,
type: getType(record.category),
seeders: record.seeders,
size: record.size,
uploadDate: record.uploadDate,
imdbId: record.imdbId
};
return createTorrentEntry(torrent);
}
const seriesCategories = [
'TV Episodes',
'Movies/TV-UHD-episodes',
'TV HD Episodes',
];
function getType(category) {
if (seriesCategories.includes(category)) {
return Type.SERIES;
}
return Type.MOVIE;
}
module.exports = { scrape };

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -0,0 +1,250 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
const decode = require('magnet-uri');
const defaultProxies = [
'https://thepiratebay.org',
'https://piratebays.icu',
'https://piratebays.cool',
'https://piratebays.life'];
const dumpUrl = '/static/dump/csv/';
const defaultTimeout = 10000;
const Categories = {
AUDIO: {
ALL: 100,
MUSIC: 101,
AUDIO_BOOKS: 102,
SOUND_CLIPS: 103,
FLAC: 104,
OTHER: 199
},
VIDEO: {
ALL: 200,
MOVIES: 201,
MOVIES_DVDR: 202,
MUSIC_VIDEOS: 203,
MOVIE_CLIPS: 204,
TV_SHOWS: 205,
HANDHELD: 206,
MOVIES_HD: 207,
TV_SHOWS_HD: 208,
MOVIES_3D: 209,
OTHER: 299
},
APPS: {
ALL: 300,
WINDOWS: 301,
MAC: 302,
UNIX: 303,
HANDHELD: 304,
IOS: 305,
ANDROID: 306,
OTHER_OS: 399
},
GAMES: {
ALL: 400,
PC: 401,
MAC: 402,
PSx: 403,
XBOX360: 404,
Wii: 405,
HANDHELD: 406,
IOS: 407,
ANDROID: 408,
OTHER: 499
},
PORN: {
ALL: 500,
MOVIES: 501,
MOVIES_DVDR: 502,
PICTURES: 503,
GAMES: 504,
MOVIES_HD: 505,
MOVIE_CLIPS: 506,
OTHER: 599
},
OTHER: {
ALL: 600,
E_BOOKS: 601,
COMICS: 602,
PICTURES: 603,
COVERS: 604,
PHYSIBLES: 605,
OTHER: 699
}
};
function torrent(torrentId, config = {}, retries = 2) {
if (!torrentId || retries === 0) {
return Promise.reject(new Error(`Failed ${torrentId} search`));
}
const proxyList = config.proxyList || defaultProxies;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
.then((body) => parseTorrentPage(body))
.then((torrent) => ({ torrentId, ...torrent }))
.catch((err) => torrent(torrentId, config, retries - 1));
}
function search(keyword, config = {}, retries = 2) {
if (!keyword || retries === 0) {
return Promise.reject(new Error(`Failed ${keyword} search`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 0;
const category = config.category || 0;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
.then((body) => parseBody(body))
.catch((err) => search(keyword, config, retries - 1));
}
function browse(config = {}, retries = 2) {
if (retries === 0) {
return Promise.reject(new Error(`Failed browse request`));
}
const proxyList = config.proxyList || defaultProxies;
const page = config.page || 0;
const category = config.category || 0;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}/browse/${category}/${page}`, config)))
.then((body) => parseBody(body))
.catch((err) => browse(config, retries - 1));
}
function dumps(config = {}, retries = 2) {
if (retries === 0) {
return Promise.reject(new Error(`Failed dump search`));
}
const proxyList = config.proxyList || defaultProxies;
return raceFirstSuccessful(proxyList
.map((proxyUrl) => singleRequest(`${proxyUrl}${dumpUrl}`, config)
.then((body) => body.match(/(<a href="[^"]+">[^<]+<\/a>.+\d)/g)
.map((group) => ({
url: `${proxyUrl}${dumpUrl}` + group.match(/<a href="([^"]+)">/)[1],
updatedAt: moment(group.match(/\s+([\w-]+\s+[\d:]+)\s+\d+$/)[1], 'DD-MMM-YYYY HH:mm').toDate()
})))))
.catch(() => dumps(config, retries - 1));
}
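// dumps() scrapes the Apache-style index at /static/dump/csv/, turning each
// '<a href="...">name</a> ... <date> <size>' row into { url, updatedAt } so a
// caller can pick the most recently updated csv dump.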
function singleRequest(requestUrl, config = {}) {
const timeout = config.timeout || defaultTimeout;
return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
.then((response) => {
const body = response.body;
if (!body) {
throw new Error(`No body: ${requestUrl}`);
} else if (body.includes('Access Denied') && !body.includes('<title>The Pirate Bay')) {
console.log(`Access Denied: ${requestUrl}`);
throw new Error(`Access Denied: ${requestUrl}`);
} else if (body.includes('502: Bad gateway') ||
body.includes('403 Forbidden') ||
body.includes('Database maintenance') ||
body.includes('Origin DNS error') ||
!(body.includes('<title>The Pirate Bay') || body.includes('TPB</title>') || body.includes(dumpUrl))) {
throw new Error(`Invalid body contents: ${requestUrl}`);
}
return body;
});
}
function parseBody(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const torrents = [];
$('table[id=\'searchResult\'] tr').each(function () {
const name = $(this).find('.detLink').text();
const sizeMatcher = $(this).find('.detDesc').text().match(/(?:,\s?Size\s)(.+),/);
if (!name || !sizeMatcher) {
return;
}
torrents.push({
torrentId: $(this).find('.detLink').attr('href').match(/torrent\/([^/]+)/)[1],
name: name,
seeders: parseInt($(this).find('td[align=\'right\']').eq(0).text(), 10),
leechers: parseInt($(this).find('td[align=\'right\']').eq(1).text(), 10),
magnetLink: $(this).find('a[title=\'Download this torrent using magnet\']').attr('href'),
category: parseInt($(this).find('a[title=\'More from this category\']').eq(0).attr('href').match(/\d+$/)[0],
10),
subcategory: parseInt($(this).find('a[title=\'More from this category\']').eq(1).attr('href').match(/\d+$/)[0],
10),
size: parseSize(sizeMatcher[1])
});
});
resolve(torrents);
});
}
function parseTorrentPage(body) {
return new Promise((resolve, reject) => {
const $ = cheerio.load(body);
if (!$) {
reject(new Error('Failed loading body'));
}
const details = $('div[id=\'details\']');
const col1 = details.find('dl[class=\'col1\']');
const imdbIdMatch = col1.html().match(/imdb\.com\/title\/tt(\d+)/i);
const torrent = {
name: $('div[id=\'title\']').text().trim(),
seeders: parseInt(details.find('dt:contains(\'Seeders:\')').next().text(), 10),
leechers: parseInt(details.find('dt:contains(\'Leechers:\')').next().text(), 10),
magnetLink: details.find('a[title=\'Get this torrent\']').attr('href'),
infoHash: decode(details.find('a[title=\'Get this torrent\']').attr('href')).infoHash,
category: Categories.VIDEO.ALL,
subcategory: parseInt(col1.find('a[title=\'More from this category\']').eq(0).attr('href').match(/\d+$/)[0], 10),
size: parseSize(details.find('dt:contains(\'Size:\')').next().text().match(/(\d+)(?:.?Bytes)/)[1]),
uploadDate: new Date(details.find('dt:contains(\'Uploaded:\')').next().text()),
imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`
};
resolve(torrent);
});
}
function parseSize(sizeText) {
if (!sizeText) {
return undefined;
}
let scale = 1;
if (sizeText.includes('GiB')) {
scale = 1024 * 1024 * 1024;
} else if (sizeText.includes('MiB')) {
scale = 1024 * 1024;
} else if (sizeText.includes('KiB')) {
scale = 1024;
}
return Math.floor(parseFloat(sizeText) * scale);
}
function raceFirstSuccessful(promises) {
return Promise.all(promises.map((p) => {
// If a request fails, count that as a resolution so it will keep
// waiting for other possible successes. If a request succeeds,
// treat it as a rejection so Promise.all immediately bails out.
return p.then(
(val) => Promise.reject(val),
(err) => Promise.resolve(err)
);
})).then(
// If '.all' resolved, we've just got an array of errors.
(errors) => Promise.reject(errors),
// If '.all' rejected, we've got the result we wanted.
(val) => Promise.resolve(val)
);
}
module.exports = { torrent, search, browse, dumps, Categories };


@@ -0,0 +1,181 @@
const moment = require('moment');
const needle = require('needle');
const Bottleneck = require('bottleneck');
const { ungzip } = require('node-gzip');
const LineByLineReader = require('line-by-line');
const fs = require('fs');
const thepiratebay = require('./thepiratebay_api.js');
const bing = require('nodejs-bing');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');
const NAME = 'ThePirateBay';
const CSV_FILE_PATH = '/tmp/tpb_dump.csv';
const limiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
const lastScraped = await repository.getProvider({ name: NAME });
const lastDump = { updatedAt: 2147000000 };
//const checkPoint = moment('2016-06-17 00:00:00', 'YYYY-MMM-DD HH:mm:ss').toDate();
//const lastDump = await thepiratebay.dumps().then((dumps) => dumps.sort((a, b) => b.updatedAt - a.updatedAt)[0]);
if (!lastScraped.lastScraped || lastScraped.lastScraped < lastDump.updatedAt) {
console.log(`starting to scrape tpb dump: ${JSON.stringify(lastDump)}`);
await downloadDump(lastDump);
let entriesProcessed = 0;
const lr = new LineByLineReader(CSV_FILE_PATH);
lr.on('line', (line) => {
if (line.includes("#ADDED")) {
return;
}
if (entriesProcessed % 1000 === 0) {
console.log(`Processed ${entriesProcessed} entries`);
}
const row = line.match(/(?<=^|;)(".*"|[^;]+)(?=;|$)/g);
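// e.g. a semicolon-delimited dump line (hypothetical values):
// '2020-Mar-10 15:12:18;3q2+7wAAAAAAAAAAAAAAAAAAAAAAAAA=;"Some.Title.2020";734003200'
// splits into the 4 fields consumed below: added-date, base64 info hash, title, size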
if (row.length !== 4) {
console.log(`Invalid row: ${line}`);
return;
}
const torrent = {
uploadDate: moment(row[0], 'YYYY-MMM-DD HH:mm:ss').toDate(),
infoHash: Buffer.from(row[1], 'base64').toString('hex'),
title: row[2]
.replace(/^"|"$/g, '')
.replace(/&amp;/g, '&')
.replace(/&\w{2,6};/g, ' ')
.replace(/\s+/g, ' ')
.trim(),
size: parseInt(row[3], 10)
};
// if (torrent.uploadDate > checkPoint) {
// entriesProcessed++;
// return;
// }
if (lastScraped.lastScraped && lastScraped.lastScraped > torrent.uploadDate) {
// torrent was already scraped previously, skipping
return;
}
if (!limiter.empty()) {
lr.pause()
}
limiter.schedule(() => processTorrentRecord(torrent)
.catch((error) => console.log(`failed ${torrent.title} due: ${error}`)))
.then(() => limiter.empty())
.then((empty) => empty && lr.resume())
.then(() => entriesProcessed++);
});
lr.on('error', (err) => {
console.log(err);
});
lr.on('end', () => {
fs.unlink(CSV_FILE_PATH, (error) => error && console.warn(error));
//repository.updateProvider({ name: NAME, lastScraped: lastDump.updatedAt });
console.log(`finished scraping tpb dump: ${JSON.stringify(lastDump)}!`);
});
}
}
const allowedCategories = [
thepiratebay.Categories.VIDEO.MOVIES,
thepiratebay.Categories.VIDEO.MOVIES_HD,
thepiratebay.Categories.VIDEO.MOVIES_DVDR,
thepiratebay.Categories.VIDEO.MOVIES_3D,
thepiratebay.Categories.VIDEO.TV_SHOWS,
thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
const seriesCategories = [
thepiratebay.Categories.VIDEO.TV_SHOWS,
thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
async function processTorrentRecord(record) {
if (await getStoredTorrentEntry(record)) {
return;
}
const torrentFound = await findTorrent(record);
if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
return createSkipTorrentEntry(record);
}
const torrent = {
infoHash: record.infoHash,
provider: NAME,
torrentId: record.torrentId,
title: torrentFound.name,
size: torrentFound.size,
type: seriesCategories.includes(torrentFound.subcategory) ? Type.SERIES : Type.MOVIE,
imdbId: torrentFound.imdbId,
uploadDate: torrentFound.uploadDate || record.uploadDate,
seeders: torrentFound.seeders,
};
return createTorrentEntry(torrent);
}
async function findTorrent(record) {
return findTorrentInSource(record)
.catch(() => findTorrentViaBing(record));
}
async function findTorrentInSource(record) {
let page = 0;
let torrentFound;
while (!torrentFound && page < 5) {
const torrents = await thepiratebay.search(record.title.replace(/[\W\s]+/g, ' '), { page: page });
torrentFound = torrents.filter(torrent => torrent.magnetLink.toLowerCase().includes(record.infoHash))[0];
page = torrents.length === 0 ? 1000 : page + 1;
}
if (!torrentFound) {
return Promise.reject(new Error(`Failed to find torrent ${record.title}`));
}
return Promise.resolve(torrentFound)
.then((torrent) => thepiratebay.torrent(torrent.torrentId));
}
async function findTorrentViaBing(record) {
return bing.web(`${record.infoHash}`)
.then((results) => results
.find(result => result.description.includes('Direct download via magnet link') ||
result.description.includes('Get this torrent')))
.then((result) => {
if (!result) {
throw new Error(`Failed to find torrent ${record.title}`);
}
return result.link.match(/torrent\/(\w+)\//)[1];
})
.then((torrentId) => thepiratebay.torrent(torrentId))
}
function downloadDump(dump) {
try {
if (fs.existsSync(CSV_FILE_PATH)) {
console.log('dump file already exists...');
return;
}
} catch (err) {
console.error(err)
}
console.log('downloading dump file...');
return needle('get', dump.url, { open_timeout: 2000, output: '/tmp/tpb_dump.gz' })
.then((response) => response.body)
.then((body) => {
console.log('unzipping dump file...');
return ungzip(body);
})
.then((unzipped) => {
console.log('writing dump file...');
return fs.promises.writeFile(CSV_FILE_PATH, unzipped);
})
}
module.exports = { scrape };


@@ -0,0 +1,85 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const thepiratebay = require('./thepiratebay_api.js');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
createTorrentEntry,
createSkipTorrentEntry,
getStoredTorrentEntry,
updateTorrentSeeders
} = require('../../lib/torrentEntries');
const NAME = 'ThePirateBay';
const UNTIL_PAGE = 20;
const limiter = new Bottleneck({ maxConcurrent: 40 });
const allowedCategories = [
thepiratebay.Categories.VIDEO.MOVIES,
thepiratebay.Categories.VIDEO.MOVIES_HD,
thepiratebay.Categories.VIDEO.MOVIES_DVDR,
thepiratebay.Categories.VIDEO.MOVIES_3D,
thepiratebay.Categories.VIDEO.TV_SHOWS,
thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
const seriesCategories = [
thepiratebay.Categories.VIDEO.TV_SHOWS,
thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
async function scrape() {
const scrapeStart = moment();
const lastScrape = await repository.getProvider({ name: NAME });
console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
const latestTorrents = await getLatestTorrents();
return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
.then(() => {
lastScrape.lastScraped = scrapeStart;
lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
return repository.updateProvider(lastScrape);
})
.then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}
async function getLatestTorrents() {
return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
.then(entries => entries.reduce((a, b) => a.concat(b), []));
}
async function getLatestTorrentsForCategory(category, page = 0) {
return thepiratebay.browse({ category, page })
.then(torrents => torrents.length && page < UNTIL_PAGE
? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
: torrents)
.catch(() => []);
}
async function processTorrentRecord(record) {
if (await getStoredTorrentEntry(record)) {
return updateTorrentSeeders(record);
}
const torrentFound = await thepiratebay.torrent(record.torrentId).catch(() => undefined);
if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
return createSkipTorrentEntry(record);
}
const torrent = {
infoHash: torrentFound.infoHash,
provider: NAME,
torrentId: torrentFound.torrentId,
title: torrentFound.name.replace(/\t|\s+/g, ' '),
size: torrentFound.size,
type: seriesCategories.includes(torrentFound.subcategory) ? Type.SERIES : Type.MOVIE,
imdbId: torrentFound.imdbId,
uploadDate: torrentFound.uploadDate,
seeders: torrentFound.seeders,
};
return createTorrentEntry(torrent);
}
module.exports = { scrape };


@@ -0,0 +1,112 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const LineByLineReader = require('line-by-line');
const fs = require('fs');
const decode = require('magnet-uri');
const thepiratebay = require('./thepiratebay_api.js');
const { Type } = require('../../lib/types');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');
const NAME = 'ThePirateBay';
const CSV_FILE_PATH = '/tmp/tpb.csv';
const limiter = new Bottleneck({ maxConcurrent: 40 });
async function scrape() {
console.log(`starting to scrape tpb dump...`);
//const checkPoint = moment('2013-06-16 00:00:00', 'YYYY-MMM-DD HH:mm:ss').toDate();
const checkPoint = 4115000;
let entriesProcessed = 0;
const lr = new LineByLineReader(CSV_FILE_PATH);
lr.on('line', (line) => {
if (entriesProcessed % 1000 === 0) {
console.log(`Processed ${entriesProcessed} entries`);
}
if (entriesProcessed <= checkPoint) {
entriesProcessed++;
return;
}
const row = line.match(/(?<=^|,)(".*"|[^,]*)(?=,|$)/g);
if (row.length !== 10) {
console.log(`Invalid row: ${line}`);
return;
}
const torrent = {
torrentId: row[0],
title: row[1]
.replace(/^"|"$/g, '')
.replace(/&amp;/g, '&')
.replace(/&\w{2,6};/g, ' ')
.replace(/\s+/g, ' ')
.trim(),
size: parseInt(row[2], 10),
category: row[4],
subcategory: row[5],
infoHash: row[7].toLowerCase() || decode(row[9]).infoHash,
magnetLink: row[9],
uploadDate: moment(row[8]).toDate(),
};
if (!limiter.empty()) {
lr.pause()
}
limiter.schedule(() => processTorrentRecord(torrent)
.catch((error) => console.log(`failed ${torrent.title} due: ${error}`)))
.then(() => limiter.empty())
.then((empty) => empty && lr.resume())
.then(() => entriesProcessed++);
});
lr.on('error', (err) => {
console.log(err);
});
lr.on('end', () => {
console.log(`finished scraping tpb dump!`);
});
}
const allowedCategories = [
thepiratebay.Categories.VIDEO.MOVIES,
thepiratebay.Categories.VIDEO.MOVIES_HD,
thepiratebay.Categories.VIDEO.MOVIES_DVDR,
thepiratebay.Categories.VIDEO.MOVIES_3D,
thepiratebay.Categories.VIDEO.TV_SHOWS,
thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
const seriesCategories = [
thepiratebay.Categories.VIDEO.TV_SHOWS,
thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
async function processTorrentRecord(record) {
if (record.category !== 'Video') {
return createSkipTorrentEntry(record);
}
if (await getStoredTorrentEntry(record)) {
return;
}
const torrentFound = await thepiratebay.torrent(record.torrentId).catch(() => undefined);
if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
return createSkipTorrentEntry(record);
}
const torrent = {
infoHash: record.infoHash,
provider: NAME,
torrentId: record.torrentId,
title: torrentFound.name,
size: torrentFound.size,
type: seriesCategories.includes(torrentFound.subcategory) ? Type.SERIES : Type.MOVIE,
imdbId: torrentFound.imdbId,
uploadDate: torrentFound.uploadDate,
seeders: torrentFound.seeders,
};
return createTorrentEntry(torrent);
}
module.exports = { scrape };