Added back original scrapers, integrated with PGSQL
182  scraper/scrapers/1337x/1337x_api.js  Normal file
@@ -0,0 +1,182 @@
const cheerio = require('cheerio');
const needle = require('needle');
const Sugar = require('sugar-date');
const decode = require('magnet-uri');
const Promises = require('../../lib/promises');
const { escapeHTML } = require('../../lib/metadata');
const { getRandomUserAgent } = require('../../lib/requestHelper');

const defaultProxies = [
  'https://1337x.to'
];
const defaultTimeout = 10000;
const maxSearchPage = 50;

const Categories = {
  MOVIE: 'Movies',
  TV: 'TV',
  ANIME: 'Anime',
  DOCUMENTARIES: 'Documentaries',
  APPS: 'Apps',
  GAMES: 'Games',
  MUSIC: 'Music',
  PORN: 'XXX',
  OTHER: 'Other',
};

// Fetches and parses a single torrent detail page, retrying on failure.
function torrent(torrentId, config = {}, retries = 2) {
  if (!torrentId || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentId} query`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const slug = torrentId.startsWith('/torrent/') ? torrentId.replace('/torrent/', '') : torrentId;

  return Promises.first(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
      .then((body) => parseTorrentPage(body))
      .then((torrent) => ({ torrentId: slug, ...torrent }))
      .catch((err) => torrent(slug, config, retries - 1));
}

// Searches for torrents by keyword; a full result page holds 40 entries,
// so a full page triggers a fetch of the next page up to extendToPage.
function search(keyword, config = {}, retries = 2) {
  if (!keyword || retries === 0) {
    return Promise.reject(new Error(`Failed ${keyword} search`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;
  const category = config.category;
  const extendToPage = Math.min(maxSearchPage, (config.extendToPage || 1));
  const requestUrl = proxyUrl => category
      ? `${proxyUrl}/category-search/${keyword}/${category}/${page}/`
      : `${proxyUrl}/search/${keyword}/${page}/`;

  return Promises.first(proxyList
      .map(proxyUrl => singleRequest(requestUrl(proxyUrl), config)))
      .then(body => parseTableBody(body))
      .then(torrents => torrents.length === 40 && page < extendToPage
          ? search(keyword, { ...config, page: page + 1 }).catch(() => [])
              .then(nextTorrents => torrents.concat(nextTorrents))
          : torrents)
      .catch((err) => search(keyword, config, retries - 1));
}

// Browses a category listing, optionally sorted, for the given page.
function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;
  const category = config.category;
  const sort = config.sort;
  const requestUrl = proxyUrl => sort
      ? `${proxyUrl}/sort-cat/${category}/${sort}/desc/${page}/`
      : `${proxyUrl}/cat/${category}/${page}/`;

  return Promises.first(proxyList
      .map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config)))
      .then((body) => parseTableBody(body))
      .catch((err) => browse(config, retries - 1));
}

// Issues a single GET request with a random user agent and verifies that
// the response looks like a real 1337x page rather than an error page.
function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;
  const options = { userAgent: getRandomUserAgent(), open_timeout: timeout, follow: 2 };

  return needle('get', requestUrl, options)
      .then((response) => {
        const body = response.body;
        if (!body) {
          throw new Error(`No body: ${requestUrl}`);
        } else if (body.includes('502: Bad gateway') ||
            body.includes('403 Forbidden') ||
            !body.includes('1337x</title>')) {
          throw new Error(`Invalid body contents: ${requestUrl}`);
        }
        return body;
      });
}

// Parses a search/browse results table into a list of torrent summaries.
function parseTableBody(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }

    const torrents = [];

    $('.table > tbody > tr').each((i, element) => {
      const row = $(element);
      torrents.push({
        name: row.find('a').eq(1).text(),
        torrentId: row.find('a').eq(1).attr('href').replace('/torrent/', ''),
        seeders: parseInt(row.children('td.coll-2').text(), 10),
        leechers: parseInt(row.children('td.coll-3').text(), 10),
        size: parseSize(row.children('td.coll-4').text())
      });
    });

    resolve(torrents);
  });
}

// Parses a torrent detail page into a full torrent record, including the
// IMDb id scraped from the description and the per-file listing.
function parseTorrentPage(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }

    const details = $('.torrent-detail-page');
    const magnetLink = details.find('a:contains(\'Magnet Download\')').attr('href');
    const imdbIdMatch = details.find('div[id=\'description\']').html().match(/imdb\.com\/title\/(tt\d+)/i);

    const torrent = {
      name: escapeHTML(decode(magnetLink).name.replace(/\+/g, ' ')),
      infoHash: decode(magnetLink).infoHash,
      magnetLink: magnetLink,
      seeders: parseInt(details.find('strong:contains(\'Seeders\')').next().text(), 10),
      leechers: parseInt(details.find('strong:contains(\'Leechers\')').next().text(), 10),
      category: details.find('strong:contains(\'Category\')').next().text(),
      languages: details.find('strong:contains(\'Language\')').next().text(),
      size: parseSize(details.find('strong:contains(\'Total size\')').next().text()),
      uploadDate: parseDate(details.find('strong:contains(\'Date uploaded\')').next().text()),
      imdbId: imdbIdMatch && imdbIdMatch[1],
      files: details.find('div[id=\'files\']').first().find('li')
          .map((i, elem) => $(elem).text())
          .map((i, text) => ({
            fileIndex: i,
            name: text.match(/^(.+)\s\(.+\)$/)[1].replace(/^.+\//g, ''),
            path: text.match(/^(.+)\s\(.+\)$/)[1],
            size: parseSize(text.match(/^.+\s\((.+)\)$/)[1])
          })).get()
    };
    resolve(torrent);
  });
}

// Converts a human-readable upload date (e.g. '2 years ago') into a Date.
function parseDate(dateString) {
  if (/decade.*ago/i.test(dateString)) {
    return Sugar.Date.create('10 years ago');
  }
  return Sugar.Date.create(dateString);
}

// Converts a size label into bytes,
// e.g. parseSize('1.4 GB') === Math.floor(1.4 * 1024 * 1024 * 1024) === 1503238553.
function parseSize(sizeText) {
  if (!sizeText) {
    return undefined;
  }
  let scale = 1;
  if (sizeText.includes('GB')) {
    scale = 1024 * 1024 * 1024;
  } else if (sizeText.includes('MB')) {
    scale = 1024 * 1024;
  } else if (sizeText.includes('KB')) {
    scale = 1024;
  }
  return Math.floor(parseFloat(sizeText.replace(/,/g, '')) * scale);
}

module.exports = { torrent, search, browse, Categories };
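
A minimal usage sketch for this module (illustrative only, not part of the commit; the require path and the 'dune' query are placeholder assumptions):

// Illustrative only: exercise search() and torrent() from 1337x_api.js.
const leetx = require('./scraper/scrapers/1337x/1337x_api');

leetx.search('dune', { category: leetx.Categories.MOVIE, extendToPage: 2 })
    .then(results => Promise.all(results.slice(0, 3)
        .map(result => leetx.torrent(result.torrentId))))
    .then(torrents => torrents.forEach(t => console.log(t.infoHash, t.name)))
    .catch(err => console.error('1337x lookup failed:', err));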
111  scraper/scrapers/1337x/1337x_scraper.js  Normal file
@@ -0,0 +1,111 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const leetx = require('./1337x_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const Promises = require('../../lib/promises');
const { createTorrentEntry, checkAndUpdateTorrent } = require('../../lib/torrentEntries');

const NAME = '1337x';
const UNTIL_PAGE = 10;
const TYPE_MAPPING = typeMapping();

// Limit concurrent requests against the site.
const limiter = new Bottleneck({ maxConcurrent: 10 });

// Entry point: scrapes the latest torrents and records the scrape time
// against this provider in the repository.
async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  return scrapeLatestTorrents()
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
        return lastScrape.save();
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}

// Refreshes seeders/leechers for a stored torrent by re-fetching its page.
async function updateSeeders(torrent) {
  return limiter.schedule(() => leetx.torrent(torrent.torrentId));
}

// Scrapes the video-related categories sequentially and flattens the results.
async function scrapeLatestTorrents() {
  const allowedCategories = [
    leetx.Categories.MOVIE,
    leetx.Categories.TV,
    leetx.Categories.ANIME,
    leetx.Categories.DOCUMENTARIES
  ];

  return Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
}

// Pages through a category listing, processing each torrent through the
// rate limiter, until an empty page or the page cap for that category.
async function scrapeLatestTorrentsForCategory(category, page = 1) {
  console.log(`Scraping ${NAME} ${category} category page ${page}`);
  return leetx.browse({ category, page })
      .catch(error => {
        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due to: `, error);
        return Promise.resolve([]);
      })
      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
      .then(resolved => resolved.length > 0 && page < untilPage(category)
          ? scrapeLatestTorrentsForCategory(category, page + 1)
          : Promise.resolve());
}

// Skips records that are already stored; otherwise fetches the full torrent
// page, validates it, and creates a new torrent entry.
async function processTorrentRecord(record) {
  if (await checkAndUpdateTorrent({ provider: NAME, ...record })) {
    return record;
  }

  const torrentFound = await leetx.torrent(record.torrentId).catch(() => undefined);

  if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
    return Promise.resolve('Invalid torrent record');
  }
  if (isNaN(torrentFound.uploadDate)) {
    console.warn(`Incorrect upload date for [${torrentFound.infoHash}] ${torrentFound.name}`);
    return;
  }
  if (await checkAndUpdateTorrent(torrentFound)) {
    return torrentFound;
  }

  const torrent = {
    infoHash: torrentFound.infoHash,
    provider: NAME,
    torrentId: torrentFound.torrentId,
    title: torrentFound.name.replace(/\t|\s+/g, ' ').trim(),
    type: TYPE_MAPPING[torrentFound.category],
    size: torrentFound.size,
    seeders: torrentFound.seeders,
    uploadDate: torrentFound.uploadDate,
    imdbId: torrentFound.imdbId,
    languages: torrentFound.languages || undefined
  };

  return createTorrentEntry(torrent);
}

// Maps 1337x categories to internal content types.
function typeMapping() {
  const mapping = {};
  mapping[leetx.Categories.MOVIE] = Type.MOVIE;
  mapping[leetx.Categories.DOCUMENTARIES] = Type.SERIES;
  mapping[leetx.Categories.TV] = Type.SERIES;
  mapping[leetx.Categories.ANIME] = Type.ANIME;
  return mapping;
}

// Fewer pages are scraped for the sparser categories.
function untilPage(category) {
  if (leetx.Categories.ANIME === category) {
    return 5;
  }
  if (leetx.Categories.DOCUMENTARIES === category) {
    return 1;
  }
  return UNTIL_PAGE;
}

module.exports = { scrape, updateSeeders, NAME };
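
Assuming a scheduler simply invokes the exported scrape(), a minimal invocation sketch (illustrative only, not part of the commit; the require path is a placeholder assumption):

// Illustrative only: run one full 1337x scrape cycle.
const scraper = require('./scraper/scrapers/1337x/1337x_scraper');

scraper.scrape()
    .then(() => console.log(`${scraper.NAME} scrape completed`))
    .catch(err => console.error(`${scraper.NAME} scrape failed:`, err));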