Mirror of https://github.com/knightcrawler-stremio/knightcrawler.git, synced 2024-12-20 03:29:51 +00:00.
[scraper] updates scrapers limiter usage
scraper/lib/promises.js (new file, 29 lines)
@@ -0,0 +1,29 @@
+/**
+ * Execute promises in sequence one after another.
+ */
+async function sequence(promises) {
+  return promises.reduce((promise, func) =>
+      promise.then(result => func().then(Array.prototype.concat.bind(result))), Promise.resolve([]));
+}
+
+/**
+ * Return first resolved promise as the result.
+ */
+function first(promises) {
+  return Promise.all(promises.map((p) => {
+    // If a request fails, count that as a resolution so it will keep
+    // waiting for other possible successes. If a request succeeds,
+    // treat it as a rejection so Promise.all immediately bails out.
+    return p.then(
+      (val) => Promise.reject(val),
+      (err) => Promise.resolve(err)
+    );
+  })).then(
+    // If '.all' resolved, we've just got an array of errors.
+    (errors) => Promise.reject(errors),
+    // If '.all' rejected, we've got the result we wanted.
+    (val) => Promise.resolve(val)
+  );
+}
+
+module.exports = { sequence, first };
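
Note: a minimal usage sketch of the two helpers above (illustrative values, not part of the commit):

    const Promises = require('./lib/promises');
    // sequence() takes an array of functions returning promises and runs them strictly one at a time
    Promises.sequence([() => Promise.resolve(1), () => Promise.resolve(2)])
        .then(results => console.log(results)); // [1, 2]
    // first() settles with the value of whichever input promise fulfils first,
    // and only rejects (with the array of errors) when every promise fails
    Promises.first([Promise.reject(new Error('proxy down')), Promise.resolve('body')])
        .then(body => console.log(body)); // 'body'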
scraper/scrapers/1337x/1337x_api.js
@@ -2,6 +2,7 @@ const cheerio = require('cheerio');
 const needle = require('needle');
 const Sugar = require('sugar-date');
 const decode = require('magnet-uri');
+const Promises = require('../../lib/promises');
 
 const defaultProxies = [
   'https://1337x.to'
@@ -27,7 +28,7 @@ function torrent(torrentId, config = {}, retries = 2) {
   const proxyList = config.proxyList || defaultProxies;
   const slug = torrentId.startsWith('/torrent/') ? torrentId.replace('/torrent/', '') : torrentId;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
       .then((body) => parseTorrentPage(body))
       .then((torrent) => ({ torrentId: slug, ...torrent }))
@@ -41,7 +42,7 @@ function search(keyword, config = {}, retries = 2) {
   const proxyList = config.proxyList || defaultProxies;
   const page = config.page || 1;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => search(keyword, config, retries - 1));
@@ -55,7 +56,7 @@ function browse(config = {}, retries = 2) {
   const page = config.page || 1;
   const category = config.category;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/cat/${category}/${page}/`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => browse(config, retries - 1));
@@ -154,21 +155,4 @@ function parseSize(sizeText) {
   return Math.floor(parseFloat(sizeText) * scale);
 }
 
-function raceFirstSuccessful(promises) {
-  return Promise.all(promises.map((p) => {
-    // If a request fails, count that as a resolution so it will keep
-    // waiting for other possible successes. If a request succeeds,
-    // treat it as a rejection so Promise.all immediately bails out.
-    return p.then(
-      (val) => Promise.reject(val),
-      (err) => Promise.resolve(err)
-    );
-  })).then(
-    // If '.all' resolved, we've just got an array of errors.
-    (errors) => Promise.reject(errors),
-    // If '.all' rejected, we've got the result we wanted.
-    (val) => Promise.resolve(val)
-  );
-}
-
 module.exports = { torrent, search, browse, Categories };
scraper/scrapers/1337x/1337x_scraper.js
@@ -3,15 +3,11 @@ const Bottleneck = require('bottleneck');
 const leetx = require('./1337x_api');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  createSkipTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = '1337x';
-const UNTIL_PAGE = 1;
+const UNTIL_PAGE = 10;
 const TYPE_MAPPING = typeMapping();
 
 const limiter = new Bottleneck({ maxConcurrent: 40 });
@@ -21,17 +17,15 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
       .then(() => {
         lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
         return repository.updateProvider(lastScrape);
       })
       .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
+async function scrapeLatestTorrents() {
   const allowedCategories = [
     leetx.Categories.MOVIE,
     leetx.Categories.TV,
@@ -39,32 +33,36 @@ async function getLatestTorrents() {
     leetx.Categories.DOCUMENTARIES
   ];
 
-  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
+  return Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)))
       .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category, page = 1) {
-  return leetx.browse(({ category: category, page: page }))
-      .then(torrents => torrents.length && page < UNTIL_PAGE
-          ? getLatestTorrents(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
-          : torrents)
-      .catch(() => []);
+async function scrapeLatestTorrentsForCategory(category, page = 1) {
+  console.log(`Scraping ${NAME} ${category} category page ${page}`);
+  return leetx.browse(({ category, page }))
+      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
+      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve())
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
-  if (await getStoredTorrentEntry(record)) {
-    return updateTorrentSeeders(record);
-  }
-
   const torrentFound = await leetx.torrent(record.torrentId).catch(() => undefined);
 
   if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
-    return createSkipTorrentEntry(record);
+    return Promise.resolve('Invalid torrent record');
   }
   if (isNaN(torrentFound.uploadDate)) {
     console.warn(`Incorrect upload date for [${torrentFound.infoHash}] ${torrentFound.name}`);
     return;
   }
+  if (await getStoredTorrentEntry(record)) {
+    return updateTorrentSeeders(record);
+  }
 
   const torrent = {
     infoHash: torrentFound.infoHash,
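
For reference, the net effect of the scraper rewrite above: categories are now scraped strictly in sequence, while the torrent records of a single page are still processed concurrently through the shared Bottleneck limiter. A condensed sketch of the resulting control flow (names as in the diff above):

    // one category at a time ...
    Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)));
    // ... with up to maxConcurrent (40) torrent records in flight per page
    Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))));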
scraper/scrapers/kickass/kickass_api.js
@@ -2,6 +2,7 @@ const cheerio = require('cheerio');
 const needle = require('needle');
 const moment = require('moment');
 const decode = require('magnet-uri');
+const Promises = require('../../lib/promises');
 
 const defaultProxies = [
   'https://katcr.co'
@@ -26,7 +27,7 @@ function torrent(torrentId, config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
       .then((body) => parseTorrentPage(body))
       .then((torrent) => ({ torrentId, ...torrent }))
@@ -41,7 +42,7 @@ function search(keyword, config = {}, retries = 2) {
   const page = config.page || 1;
   const category = config.category;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => search(keyword, config, retries - 1));
@@ -55,7 +56,7 @@ function browse(config = {}, retries = 2) {
   const page = config.page || 1;
   const category = config.category;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/category/${category}/page/${page}`, config)))
       .then((body) => parseTableBody(body))
       .catch((err) => browse(config, retries - 1));
@@ -94,9 +95,12 @@ function parseTableBody(body) {
 
   $('.table > tbody > tr').each((i, element) => {
     const row = $(element);
+    const magnetLink = row.find('a[title="Torrent magnet link"]').attr('href');
     torrents.push({
-      torrentId: row.find('a[class="torrents_table__torrent_title"]').first().attr('href').replace('/torrent/', ''),
       name: row.find('a[class="torrents_table__torrent_title"]').first().children('b').text(),
+      infoHash: decode(magnetLink).infoHash,
+      magnetLink: magnetLink,
+      torrentId: row.find('a[class="torrents_table__torrent_title"]').first().attr('href').replace('/torrent/', ''),
       category: row.find('span[class="torrents_table__upload_info"]').first().children('a').first().attr('href')
           .match(/category\/([^\/]+)/)[1],
       seeders: parseInt(row.find('td[data-title="Seed"]').first().text()),
@@ -167,21 +171,4 @@ function parseSize(sizeText) {
   return Math.floor(parseFloat(sizeText.replace(/[',]/g, '')) * scale);
 }
 
-function raceFirstSuccessful(promises) {
-  return Promise.all(promises.map((p) => {
-    // If a request fails, count that as a resolution so it will keep
-    // waiting for other possible successes. If a request succeeds,
-    // treat it as a rejection so Promise.all immediately bails out.
-    return p.then(
-      (val) => Promise.reject(val),
-      (err) => Promise.resolve(err)
-    );
-  })).then(
-    // If '.all' resolved, we've just got an array of errors.
-    (errors) => Promise.reject(errors),
-    // If '.all' rejected, we've got the result we wanted.
-    (val) => Promise.resolve(val)
-  );
-}
-
 module.exports = { torrent, search, browse, Categories };
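
Note: the new infoHash field above is parsed out of the magnet link by the magnet-uri package, already imported as decode. A minimal sketch of that call (example magnet link, not from the commit):

    const decode = require('magnet-uri');
    const parsed = decode('magnet:?xt=urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a');
    console.log(parsed.infoHash); // 'c12fe1c06bba254a9dc9f519b335aa7c1367a88a'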
scraper/scrapers/kickass/kickass_scraper.js
@@ -3,15 +3,11 @@ const Bottleneck = require('bottleneck');
 const kickass = require('./kickass_api');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  createSkipTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = 'KickassTorrents';
-const UNTIL_PAGE = 1;
+const UNTIL_PAGE = 10;
 const TYPE_MAPPING = typeMapping();
 
 const limiter = new Bottleneck({ maxConcurrent: 40 });
@@ -21,33 +17,36 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
       .then(() => {
         lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
         return repository.updateProvider(lastScrape);
       })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
+async function scrapeLatestTorrents() {
   const allowedCategories = [
     kickass.Categories.MOVIE,
     kickass.Categories.TV,
     kickass.Categories.ANIME,
   ];
 
-  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
+  return Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)))
       .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category, page = 1) {
+async function scrapeLatestTorrentsForCategory(category, page = 1) {
+  console.log(`Scraping ${NAME} ${category} category page ${page}`);
   return kickass.browse(({ category, page }))
-      .then(torrents => torrents.length && page < UNTIL_PAGE
-          ? getLatestTorrents(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
-          : torrents)
-      .catch(() => []);
+      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
+      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve())
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
@@ -58,7 +57,7 @@ async function processTorrentRecord(record) {
   const torrentFound = await kickass.torrent(record.torrentId).catch(() => undefined);
 
   if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
-    return createSkipTorrentEntry(record);
+    return Promise.resolve('Invalid torrent record');
   }
 
   const torrent = {
@@ -73,7 +72,7 @@ async function processTorrentRecord(record) {
     seeders: torrentFound.seeders,
   };
 
-  return createTorrentEntry(torrent);
+  return createTorrentEntry(torrent).then(() => torrent);
 }
 
 function typeMapping() {
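
Note: these scrapers throttle work through Bottleneck's schedule(), which queues a promise-returning function and lets at most maxConcurrent jobs run at once. A minimal standalone sketch (fetchSomething is a hypothetical job):

    const Bottleneck = require('bottleneck');
    const limiter = new Bottleneck({ maxConcurrent: 40 });
    // schedule() resolves with the wrapped function's result once a slot frees up
    limiter.schedule(() => fetchSomething()).then(result => console.log(result));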
scraper/scrapers/rarbg/rarbg_scraper.js
@@ -4,11 +4,8 @@ const rarbg = require('rarbg-api');
 const decode = require('magnet-uri');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = 'RARBG';
 
@@ -20,17 +17,15 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => entryLimiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
       .then(() => {
         lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
         return repository.updateProvider(lastScrape);
       })
       .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
+async function scrapeLatestTorrents() {
   const allowedCategories = [
     rarbg.CATEGORY['4K_MOVIES_X264_4k'],
     rarbg.CATEGORY['4K_X265_4k'],
@@ -48,11 +43,13 @@ async function getLatestTorrents() {
     rarbg.CATEGORY.TV_HD_EPISODES
   ];
 
-  return Promise.all(allowedCategories.map(category => limiter.schedule(() => getLatestTorrentsForCategory(category))))
+  return Promises.sequence(allowedCategories
+      .map(category => () => limiter.schedule(() => scrapeLatestTorrentsForCategory(category))))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category) {
+async function scrapeLatestTorrentsForCategory(category) {
+  console.log(`Scraping ${NAME} ${category} category`);
   return rarbg.list({ category: category, limit: 100, sort: 'last', format: 'json_extended', ranked: 0 })
       .then(torrents => torrents.map(torrent => ({
         name: torrent.title,
@@ -65,7 +62,11 @@ async function getLatestTorrentsForCategory(category) {
         uploadDate: new Date(torrent.pubdate),
         imdbId: torrent.episode_info && torrent.episode_info.imdb
       })))
-      .catch((err) => []);
+      .then(torrents => Promise.all(torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t)))))
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
scraper/scrapers/thepiratebay/thepiratebay_api.js
@@ -2,6 +2,7 @@ const cheerio = require('cheerio');
 const needle = require('needle');
 const moment = require('moment');
 const decode = require('magnet-uri');
+const Promises = require('../../lib/promises');
 
 const defaultProxies = [
   'https://thepiratebay.org',
@@ -82,7 +83,7 @@ function torrent(torrentId, config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
       .then((body) => parseTorrentPage(body))
       .then((torrent) => ({ torrentId, ...torrent }))
@@ -97,7 +98,7 @@ function search(keyword, config = {}, retries = 2) {
   const page = config.page || 0;
   const category = config.category || 0;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
       .then((body) => parseBody(body))
       .catch((err) => search(keyword, config, retries - 1));
@@ -111,7 +112,7 @@ function browse(config = {}, retries = 2) {
   const page = config.page || 0;
   const category = config.category || 0;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}/browse/${category}/${page}`, config)))
       .then((body) => parseBody(body))
       .catch((err) => browse(config, retries - 1));
@@ -123,7 +124,7 @@ function dumps(config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
 
-  return raceFirstSuccessful(proxyList
+  return Promises.first(proxyList
       .map((proxyUrl) => singleRequest(`${proxyUrl}${dumpUrl}`, config)
           .then((body) => body.match(/(<a href="[^"]+">[^<]+<\/a>.+\d)/g)
               .map((group) => ({
@@ -230,21 +231,4 @@ function parseSize(sizeText) {
   return Math.floor(parseFloat(sizeText) * scale);
 }
 
-function raceFirstSuccessful(promises) {
-  return Promise.all(promises.map((p) => {
-    // If a request fails, count that as a resolution so it will keep
-    // waiting for other possible successes. If a request succeeds,
-    // treat it as a rejection so Promise.all immediately bails out.
-    return p.then(
-      (val) => Promise.reject(val),
-      (err) => Promise.resolve(err)
-    );
-  })).then(
-    // If '.all' resolved, we've just got an array of errors.
-    (errors) => Promise.reject(errors),
-    // If '.all' rejected, we've got the result we wanted.
-    (val) => Promise.resolve(val)
-  );
-}
-
 module.exports = { torrent, search, browse, dumps, Categories };
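
Note: with this commit all three site API modules (1337x, kickass, thepiratebay) share the single Promises.first helper instead of carrying their own raceFirstSuccessful copy. The behaviour is unchanged: the first proxy to return a body wins, and the trailing .catch() retry fires only once every proxy in the list has failed, e.g.:

    Promises.first(proxyList.map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
        .then((body) => parseTorrentPage(body))
        .catch((err) => torrent(torrentId, config, retries - 1));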
scraper/scrapers/thepiratebay/thepiratebay_scraper.js
@@ -3,12 +3,8 @@ const Bottleneck = require('bottleneck');
 const thepiratebay = require('./thepiratebay_api.js');
 const { Type } = require('../../lib/types');
 const repository = require('../../lib/repository');
-const {
-  createTorrentEntry,
-  createSkipTorrentEntry,
-  getStoredTorrentEntry,
-  updateTorrentSeeders
-} = require('../../lib/torrentEntries');
+const Promises = require('../../lib/promises');
+const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
 
 const NAME = 'ThePirateBay';
 const UNTIL_PAGE = 20;
@@ -33,27 +29,30 @@ async function scrape() {
   const lastScrape = await repository.getProvider({ name: NAME });
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
 
-  const latestTorrents = await getLatestTorrents();
-  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
+  return scrapeLatestTorrents()
       .then(() => {
         lastScrape.lastScraped = scrapeStart;
-        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
         return repository.updateProvider(lastScrape);
       })
       .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 
-async function getLatestTorrents() {
-  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
+async function scrapeLatestTorrents() {
+  return Promises.sequence(allowedCategories.map(category => () => scrapeLatestTorrentsForCategory(category)))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
 }
 
-async function getLatestTorrentsForCategory(category, page = 0) {
+async function scrapeLatestTorrentsForCategory(category, page = 1) {
+  console.log(`Scraping ${NAME} ${category} category page ${page}`);
   return thepiratebay.browse(({ category, page }))
-      .then(torrents => torrents.length && page < UNTIL_PAGE
-          ? getLatestTorrents(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
-          : torrents)
-      .catch(() => []);
+      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
+      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve())
+      .catch(error => {
+        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due to: `, error);
+        return Promise.resolve();
+      });
 }
 
 async function processTorrentRecord(record) {
@@ -64,7 +63,7 @@ async function processTorrentRecord(record) {
   const torrentFound = await thepiratebay.torrent(record.torrentId).catch(() => undefined);
 
   if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
-    return createSkipTorrentEntry(record);
+    return Promise.resolve('Invalid torrent record');
   }
 
   const torrent = {