Initial commit

This commit is contained in:
TheBeastLT
2019-03-13 22:40:09 +01:00
parent 99c2caf1aa
commit 5a122683d7
12 changed files with 3070 additions and 0 deletions

View File

@@ -0,0 +1,42 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
// Base site URL and request timeout; both can be overridden per call
// via config.proxyUrl / config.timeout.
const defaultUrl = 'https://horriblesubs.info';
const defaultTimeout = 5000;
/**
 * Fetches a page from the site and loads it into cheerio.
 * @param {string} url - path appended to the base URL.
 * @param {Object} [config] - optional { proxyUrl, timeout } overrides.
 * @returns {Promise} resolves with the cheerio-parsed document.
 */
function _getContent(url, config = {}) {
  const baseUrl = config.proxyUrl || defaultUrl;
  const timeout = config.timeout || defaultTimeout;
  const options = { open_timeout: timeout, follow: 2 };
  return needle('get', `${baseUrl}${url}`, options)
      .then((response) => cheerio.load(response.body));
}
/**
 * Resolves the numeric show id embedded in the show page's inline script
 * and attaches it to the given showInfo.
 * @param {{url: string}} showInfo - entry produced by allShows().
 * @returns {Promise<Object>} the same showInfo with `id` set.
 * @throws rejects with a descriptive Error when the id cannot be found.
 */
function _getAnimeId(showInfo) {
  return _getContent(showInfo.url).then(($) => {
    const text = $('div.entry-content').find('script').html();
    // Guard: html() can return null and the id may be absent; the
    // original indexed the match result directly and threw an opaque
    // TypeError ("Cannot read property '1' of null") in that case.
    const match = text && text.match(/var hs_showid = (\d+)/);
    if (!match) {
      throw new Error(`Failed to extract show id from ${showInfo.url}`);
    }
    showInfo.id = match[1];
    return showInfo;
  });
}
/**
 * Scrapes the complete list of shows from the '/shows' page.
 * @param {Object} [config] - optional { proxyUrl, timeout } overrides.
 * @returns {Promise<Array<{title: string, url: string}>>}
 */
function allShows(config = {}) {
  const baseUrl = config.proxyUrl || defaultUrl;
  return _getContent('/shows', config).then(($) => {
    return $('div[class=\'ind-show\']')
        .map((index, element) => $(element).children('a'))
        .map((index, anchor) => ({
          title: anchor.attr('title'),
          url: `${baseUrl}${anchor.attr('href')}`
        }))
        .get();
  });
}
/**
 * Retrieves show data for the given show entry.
 * NOTE(review): the original body ended in `.then((showInfo) => )`,
 * which is a syntax error — the file could not even be required. It now
 * resolves with the show info enriched with its numeric id, which is
 * what the truncated chain started to do.
 * @param {{url: string}} showInfo - entry produced by allShows().
 * @returns {Promise<Object>} showInfo with `id` populated.
 */
function showData(showInfo) {
  return _getAnimeId(showInfo);
}
module.exports = { allShows };

View File

@@ -0,0 +1,179 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
// TPB mirrors raced in parallel; config.proxyList may override.
const defaultProxies = ['https://pirateproxy.sh', 'https://thepiratebay.org'];
// Path of the CSV dump directory listing on a mirror.
const dumpUrl = '/static/dump/csv/';
const defaultTimeout = 5000;
// Sentinel error objects rejected by the request/parse helpers.
const errors = {
REQUEST_ERROR: { code: 'REQUEST_ERROR' },
PARSER_ERROR: { code: 'PARSER_ERROR' }
};
// ThePirateBay category / subcategory id table (the numeric codes used
// in search URLs and result rows).
// Fixed: the original was an implicit global assignment (no `const`),
// which leaks a global in sloppy mode and throws a ReferenceError in
// strict mode / ES modules.
const Categories = {
  AUDIO: {
    ALL: 100,
    MUSIC: 101,
    AUDIO_BOOKS: 102,
    SOUND_CLIPS: 103,
    FLAC: 104,
    OTHER: 199
  },
  VIDEO: {
    ALL: 200,
    MOVIES: 201,
    MOVIES_DVDR: 202,
    MUSIC_VIDEOS: 203,
    MOVIE_CLIPS: 204,
    TV_SHOWS: 205,
    HANDHELD: 206,
    MOVIES_HD: 207,
    TV_SHOWS_HD: 208,
    MOVIES_3D: 209,
    OTHER: 299
  },
  APPS: {
    ALL: 300,
    WINDOWS: 301,
    MAC: 302,
    UNIX: 303,
    HANDHELD: 304,
    IOS: 305,
    ANDROID: 306,
    OTHER_OS: 399
  },
  GAMES: {
    ALL: 400,
    PC: 401,
    MAC: 402,
    PSx: 403,
    XBOX360: 404,
    Wii: 405,
    HANDHELD: 406,
    IOS: 407,
    ANDROID: 408,
    OTHER: 499
  },
  PORN: {
    ALL: 500,
    MOVIES: 501,
    MOVIES_DVDR: 502,
    PICTURES: 503,
    GAMES: 504,
    MOVIES_HD: 505,
    MOVIE_CLIPS: 506,
    OTHER: 599
  },
  OTHER: {
    ALL: 600,
    E_BOOKS: 601,
    COMICS: 602,
    PICTURES: 603,
    COVERS: 604,
    PHYSIBLES: 605,
    OTHER: 699
  }
};
/**
 * Searches the TPB mirrors for a keyword, racing all proxies and
 * retrying the whole round on failure.
 * @param {string} keyword - search term.
 * @param {Object} [config] - { proxyList, page, cat, timeout }.
 * @param {number} [retries] - attempts remaining before giving up.
 * @returns {Promise<Array<Object>>} parsed torrent rows.
 */
function search(keyword, config = {}, retries = 2) {
  if (!keyword || retries === 0) {
    return Promise.reject(new Error(`Failed ${keyword} search`));
  }
  const proxies = config.proxyList || defaultProxies;
  const page = config.page || 0;
  const category = config.cat || 0;
  const requests = proxies.map((proxyUrl) =>
      singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config));
  return raceFirstSuccessful(requests)
      .then(parseBody)
      .catch(() => search(keyword, config, retries - 1));
}
/**
 * Lists the CSV dump files available on the mirrors, newest metadata
 * scraped from the directory-listing HTML.
 * @param {Object} [config] - { proxyList, timeout }.
 * @param {number} [retries] - attempts remaining before giving up.
 * @returns {Promise<Array<{url: string, updatedAt: Date}>>}
 */
function dumps(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed dump search`));
  }
  const proxies = config.proxyList || defaultProxies;
  const requests = proxies.map((proxyUrl) =>
      singleRequest(`${proxyUrl}${dumpUrl}`, config).then((body) => {
        // Each listing row looks like: <a href="file">file</a>  date  size
        const rows = body.match(/(<a href="[^"]+">[^<]+<\/a>.+\d)/g);
        return rows.map((group) => ({
          url: `${proxyUrl}${dumpUrl}` + group.match(/<a href="([^"]+)">/)[1],
          updatedAt: moment(group.match(/\s+([\w-]+\s+[\d:]+)\s+\d+$/)[1], 'DD-MMM-YYYY HH:mm').toDate()
        }));
      }));
  return raceFirstSuccessful(requests)
      .catch(() => dumps(config, retries - 1));
}
/**
 * Performs one GET request against a mirror and resolves with the raw
 * HTML body, rejecting on transport errors or known mirror error pages.
 * @param {string} requestUrl - fully-qualified URL to fetch.
 * @param {Object} [config] - { timeout } override.
 * @returns {Promise<string>} the response body.
 */
function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;
  return new Promise((resolve, reject) => {
    needle.get(requestUrl,
        { open_timeout: timeout, follow: 2 },
        (err, res, body) => {
          if (err || !body) {
            reject(err || errors.REQUEST_ERROR);
          } else if (body.includes('Access Denied') && !body.includes('<title>The Pirate Bay')) {
            // Fixed: the original referenced an undefined `url` variable
            // here, raising a ReferenceError instead of rejecting cleanly.
            console.log(`Access Denied: ${requestUrl}`);
            reject(new Error(`Access Denied: ${requestUrl}`));
          } else if (body.includes('502: Bad gateway') ||
              body.includes('403 Forbidden') ||
              body.includes('Database maintenance') ||
              body.includes('Origin DNS error') ||
              !body.includes('<title>The Pirate Bay')) {
            reject(errors.REQUEST_ERROR);
          } else {
            // Fixed: resolve(body) was previously reachable after reject
            // (harmless since a promise settles once, but misleading).
            resolve(body);
          }
        });
  });
}
/**
 * Parses a TPB search-results page into torrent records.
 * @param {string} body - raw HTML of a results page.
 * @returns {Promise<Array<{name, seeders, leechers, magnetLink, category, subcategory}>>}
 */
function parseBody(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);
    if (!$) {
      // Fixed: the original rejected `new Error(errors.PARSER_ERROR)`,
      // which stringifies the sentinel to "[object Object]"; reject the
      // sentinel itself, consistent with singleRequest, and return so
      // resolve() below is not reached.
      reject(errors.PARSER_ERROR);
      return;
    }
    const torrents = [];
    $('table[id=\'searchResult\'] tr').each(function() {
      const name = $(this).find('.detLink').text();
      // Header/spacer rows have no detail link; skip them.
      if (!name) {
        return;
      }
      torrents.push({
        name: name,
        seeders: parseInt($(this).find('td[align=\'right\']').eq(0).text(), 10),
        leechers: parseInt($(this).find('td[align=\'right\']').eq(1).text(), 10),
        magnetLink: $(this).find('a[title=\'Download this torrent using magnet\']').attr('href'),
        // Category links end in the numeric category code, e.g. /browse/201.
        category: parseInt($(this).find('a[title=\'More from this category\']').eq(0).attr('href').match(/\d+$/)[0], 10),
        subcategory: parseInt($(this).find('a[title=\'More from this category\']').eq(1).attr('href').match(/\d+$/)[0], 10)
      });
    });
    resolve(torrents);
  });
}
/**
 * Resolves with the value of the first promise to FULFILL, ignoring
 * rejections; rejects with an array of all errors only when every
 * promise rejects. (An inverted Promise.all: failures are swallowed
 * until none remain, the first success bails out immediately.)
 * @param {Array<Promise>} promises
 * @returns {Promise} first fulfilled value, or rejects with Error[].
 */
function raceFirstSuccessful(promises) {
  // Flip each promise: success becomes a rejection (so Promise.all
  // short-circuits on it), failure becomes a resolution (so the race
  // keeps waiting for other candidates).
  const inverted = promises.map((promise) =>
      promise.then((value) => Promise.reject(value), (error) => error));
  return Promise.all(inverted).then(
      (allErrors) => Promise.reject(allErrors),
      (firstValue) => firstValue);
}
module.exports = { search, dumps, Categories };

167
scrapers/piratebay_dump.js Normal file
View File

@@ -0,0 +1,167 @@
const moment = require('moment');
const needle = require('needle');
const Bottleneck = require('bottleneck');
const { ungzip } = require('node-gzip');
const LineByLineReader = require('line-by-line');
const fs = require('fs');
const { parse } = require('parse-torrent-title');
const pirata = require('./api/thepiratebay');
const { torrentFiles } = require('../lib/torrent');
const repository = require('../lib/repository');
const { getImdbId } = require('../lib/metadata');
// Provider identifier recorded alongside scraped torrents.
const NAME = 'thepiratebay';
// Where the unzipped daily dump CSV is staged during a scrape run.
const CSV_FILE_PATH = '/tmp/tpb_dump.csv';
// Caps concurrent torrent-record processing at 40 in-flight jobs.
const limiter = new Bottleneck({maxConcurrent: 40});
/**
 * Scrapes the newest TPB CSV dump when it is more recent than the last
 * recorded run: streams the CSV line by line, throttles record
 * processing through the limiter (pausing the reader for back-pressure),
 * then stores the dump timestamp and removes the staged file.
 *
 * Fixes vs. original:
 *  - removed a leftover debug block that parsed a hard-coded title and
 *    made an unused getImdbId network call on every invocation;
 *  - fs.unlink now gets the callback it requires;
 *  - `updateProvider` was called as a bare (undefined) global — it
 *    lives on the repository module — and `.toDate()` was called on
 *    `lastDump.updatedAt`, which is already a Date (TypeError).
 */
async function scrape() {
  const provider = await repository.getProvider({ name: NAME });
  const lastDump = await pirata.dumps()
      .then((dumps) => dumps.sort((a, b) => b.updatedAt - a.updatedAt)[0]);
  if (provider.lastScraped && provider.lastScraped >= lastDump.updatedAt) {
    return;
  }
  console.log(`starting to scrape tpb dump: ${JSON.stringify(lastDump)}`);
  // NOTE(review): download left disabled as in the original; re-enable
  // once the dump is no longer staged manually at CSV_FILE_PATH.
  //await downloadDump(lastDump);
  const lr = new LineByLineReader(CSV_FILE_PATH);
  lr.on('line', (line) => {
    if (line.includes("#ADDED")) {
      return;
    }
    // Columns: added-date;base64-infohash;"title";size
    const row = line.match(/(?<=^|;)(".*"|[^;]+)(?=;|$)/g);
    const torrent = {
      uploadDate: moment(row[0], 'YYYY-MMM-DD HH:mm:ss').toDate(),
      infoHash: Buffer.from(row[1], 'base64').toString('hex'),
      title: row[2]
          .replace(/^"|"$/g, '')
          .replace(/&\w{2,6};/g, ' ')
          .replace(/\s+/g, ' '),
      size: parseInt(row[3], 10)
    };
    // Back-pressure: stop reading while the limiter queue is saturated.
    if (!limiter.empty()) {
      lr.pause();
    }
    limiter.schedule(() => processTorrentRecord(torrent)
        .catch((error) => console.log(`failed ${torrent.title} due: ${error}`)))
        .then(() => limiter.empty())
        .then((empty) => empty && lr.resume());
  });
  lr.on('error', (err) => {
    console.log(err);
  });
  lr.on('end', () => {
    fs.unlink(CSV_FILE_PATH, (err) => err && console.log(err));
    repository.updateProvider({ name: NAME, lastScraped: lastDump.updatedAt });
    console.log(`finished to scrape tpb dump: ${JSON.stringify(lastDump)}!`);
  });
}
// Subcategories worth keeping; anything else is recorded as skipped.
const allowedCategories = [
pirata.Categories.VIDEO.MOVIES,
pirata.Categories.VIDEO.MOVIES_HD,
pirata.Categories.VIDEO.MOVIES_DVDR,
pirata.Categories.VIDEO.MOVIES_3D,
pirata.Categories.VIDEO.TV_SHOWS,
pirata.Categories.VIDEO.TV_SHOWS_HD
];
// Subset of the allowed categories that maps to type 'series'.
const seriesCategories = [
pirata.Categories.VIDEO.TV_SHOWS,
pirata.Categories.VIDEO.TV_SHOWS_HD
];
/**
 * Resolves a dump CSV record against live TPB search results, looks up
 * its imdbId and persists it — or records it as skipped / imdb-failed.
 * Sequencing matters: each repository/search call gates the next step.
 * @param {{title: string, infoHash: string, uploadDate: Date, size: number}} record
 */
async function processTorrentRecord(record) {
// Skip records already handled in a previous run (skipped or stored).
// NOTE(review): the create*/update* repository calls below are not
// awaited (fire-and-forget) — presumably intentional; confirm the
// repository tolerates the process exiting with writes in flight.
const persisted = await repository.getSkipTorrent(record)
.catch(() => repository.getTorrent(record)).catch(() => undefined);
if (persisted) {
return;
}
// Page through search results (up to 5 pages) looking for the record's
// info hash inside each result's magnet link.
let page = 0;
let torrentFound;
while (!torrentFound && page < 5) {
const torrents = await pirata.search(record.title.replace(/[\W\s]+/, ' '), { page: page });
torrentFound = torrents.
filter(torrent => torrent.magnetLink.toLowerCase().includes(record.infoHash))[0];
// An empty page ends the loop early by pushing the counter out of range.
page = torrents.length === 0 ? 1000 : page + 1;
}
if (!torrentFound) {
console.log(`not found: ${JSON.stringify(record)}`);
repository.createSkipTorrent(record);
return;
}
if (!allowedCategories.includes(torrentFound.subcategory)) {
console.log(`wrong category: ${torrentFound.name}`);
repository.createSkipTorrent(record);
return;
}
// TV subcategories map to 'series'; everything else is a movie.
const type = seriesCategories.includes(torrentFound.subcategory) ? 'series' : 'movie';
console.log(`imdbId search: ${torrentFound.name}`);
const titleInfo = parse(torrentFound.name);
const imdbId = await getImdbId({
name: titleInfo.title.toLowerCase(),
year: titleInfo.year,
type: type
}).catch(() => undefined);
if (!imdbId) {
console.log(`imdbId not found: ${torrentFound.name}`);
repository.createFailedImdbTorrent(record);
return;
}
// Movies and single-episode torrents are stored without a file listing.
if (type === 'movie' || titleInfo.episode) {
repository.updateTorrent({
infoHash: record.infoHash,
provider: NAME,
title: torrentFound.name,
imdbId: imdbId,
uploadDate: record.uploadDate,
seeders: torrentFound.seeders,
});
return;
}
// Season packs: enumerate the contained video files before storing.
const files = await torrentFiles(record).catch(() => []);
if (!files || !files.length) {
console.log(`no video files found: ${torrentFound.name}`);
return;
}
repository.updateTorrent({
infoHash: record.infoHash,
provider: NAME,
title: torrentFound.name,
imdbId: imdbId,
uploadDate: record.uploadDate,
seeders: torrentFound.seeders,
files: files
})
}
/**
 * Downloads the gzipped dump, unzips it in memory and writes the CSV
 * to CSV_FILE_PATH for the line reader to consume.
 * Fixed: dropped the hard-coded `output: '/home/paulius/Downloads/...'`
 * option — a leftover personal development path; the gzipped body is
 * consumed from memory by the chain below, so mirroring it to disk
 * there served no purpose (and fails on any other machine).
 * @param {{url: string}} dump - dump descriptor from pirata.dumps().
 * @returns {Promise<void>} resolves once the CSV file is written.
 */
function downloadDump(dump) {
  console.log('downloading dump file...');
  return needle('get', dump.url, { open_timeout: 2000 })
      .then((response) => response.body)
      .then((body) => { console.log('unzipping dump file...'); return ungzip(body); })
      .then((unzipped) => { console.log('writing dump file...'); return fs.promises.writeFile(CSV_FILE_PATH, unzipped); });
}
module.exports = { scrape };