mirror of
https://github.com/knightcrawler-stremio/knightcrawler.git
synced 2024-12-20 03:29:51 +00:00
[scraper] adds yts scraper
This commit is contained in:
@@ -5,6 +5,7 @@ const schedule = require('node-schedule');
|
||||
const { connect, getUpdateSeedersTorrents } = require('./lib/repository');
|
||||
const thepiratebayScraper = require('./scrapers/thepiratebay/thepiratebay_scraper');
|
||||
const horribleSubsScraper = require('./scrapers/horriblesubs/horriblesubs_scraper');
|
||||
const ytsScraper = require('./scrapers/yts/yts_scraper');
|
||||
const leetxScraper = require('./scrapers/1337x/1337x_scraper');
|
||||
const kickassScraper = require('./scrapers/kickass/kickass_scraper');
|
||||
const rarbgScraper = require('./scrapers/rarbg/rarbg_scraper');
|
||||
@@ -15,6 +16,7 @@ const thepiratebayUnofficialDumpScraper = require('./scrapers/thepiratebay/thepi
|
||||
const PROVIDERS = [
|
||||
// require('./scrapers/thepiratebay/thepiratebay_update_size_scraper')
|
||||
// require('./scrapers/1337x/1337x_dump_scraper')
|
||||
ytsScraper,
|
||||
horribleSubsScraper,
|
||||
rarbgScraper,
|
||||
thepiratebayScraper,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/**
|
||||
* Execute promises in sequence one after another.
|
||||
* Execute promises in sequence one after another.
|
||||
*/
|
||||
async function sequence(promises) {
|
||||
return promises.reduce((promise, func) =>
|
||||
@@ -9,7 +9,7 @@ async function sequence(promises) {
|
||||
/**
|
||||
* Return first resolved promise as the result.
|
||||
*/
|
||||
function first(promises) {
|
||||
async function first(promises) {
|
||||
return Promise.all(promises.map((p) => {
|
||||
// If a request fails, count that as a resolution so it will keep
|
||||
// waiting for other possible successes. If a request succeeds,
|
||||
@@ -26,4 +26,11 @@ function first(promises) {
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = { sequence, first };
|
||||
/**
|
||||
* Delay promise
|
||||
*/
|
||||
async function delay(duration) {
|
||||
return new Promise((resolve) => setTimeout(resolve, duration));
|
||||
}
|
||||
|
||||
module.exports = { sequence, first, delay };
|
||||
92
scraper/scrapers/yts/yts_api.js
Normal file
92
scraper/scrapers/yts/yts_api.js
Normal file
@@ -0,0 +1,92 @@
|
||||
const needle = require('needle');
|
||||
const Promises = require('../../lib/promises');
|
||||
const { getRandomUserAgent } = require('./../../lib/request_helper');
|
||||
|
||||
const defaultProxies = [
|
||||
'https://yts.mx'
|
||||
];
|
||||
const defaultTimeout = 30000;
|
||||
const limit = 50;
|
||||
|
||||
function torrent(torrentId, config = {}, retries = 2) {
|
||||
if (!torrentId || retries === 0) {
|
||||
return Promise.reject(new Error(`Failed ${torrentId} search`));
|
||||
}
|
||||
|
||||
return Promises.first(defaultProxies
|
||||
.map(proxyUrl => singleRequest(`${proxyUrl}/api/v2/movie_details.json?movie_id=${torrentId}`, config)))
|
||||
.then(body => parseResults(body))
|
||||
.catch(error => torrent(torrentId, config, retries - 1));
|
||||
}
|
||||
|
||||
function search(query, config = {}, retries = 2) {
|
||||
if (!query || retries === 0) {
|
||||
return Promise.reject(new Error(`Failed ${query} search`));
|
||||
}
|
||||
|
||||
return Promises.first(defaultProxies
|
||||
.map(proxyUrl => singleRequest(`${proxyUrl}/api/v2/list_movies.json?limit=${limit}&query_term=${query}`, config)))
|
||||
.then(results => parseResults(results))
|
||||
.catch(error => search(query, config, retries - 1));
|
||||
}
|
||||
|
||||
function browse(config = {}, retries = 2) {
|
||||
if (retries === 0) {
|
||||
return Promise.reject(new Error(`Failed browse request`));
|
||||
}
|
||||
const page = config.page || 1;
|
||||
|
||||
return Promises.first(defaultProxies
|
||||
.map(proxyUrl => singleRequest(`${proxyUrl}/api/v2/list_movies.json?limit=${limit}&page=${page}`, config)))
|
||||
.then(results => parseResults(results))
|
||||
.catch(error => browse(config, retries - 1));
|
||||
}
|
||||
|
||||
function singleRequest(requestUrl, config = {}) {
|
||||
const timeout = config.timeout || defaultTimeout;
|
||||
const options = { userAgent: getRandomUserAgent(), open_timeout: timeout, follow: 2 };
|
||||
|
||||
return needle('get', requestUrl, options)
|
||||
.then(response => {
|
||||
if (!response.body) {
|
||||
return Promise.reject(`No body: ${requestUrl}`);
|
||||
}
|
||||
return Promise.resolve(response.body);
|
||||
});
|
||||
}
|
||||
|
||||
function parseResults(results) {
|
||||
if (!results || !results.data || (!results.data.movie && !Array.isArray(results.data.movies))) {
|
||||
console.log('Incorrect results: ', results);
|
||||
return Promise.reject('Incorrect results')
|
||||
}
|
||||
return (results.data.movies || [results.data.movie])
|
||||
.filter(movie => Array.isArray(movie.torrents))
|
||||
.map(movie => parseMovie(movie))
|
||||
.reduce((a, b) => a.concat(b), []);
|
||||
}
|
||||
|
||||
function parseMovie(movie) {
|
||||
return movie.torrents.map(torrent => ({
|
||||
name: `${movie.title} ${movie.year} ${torrent.quality} ${formatType(torrent.type)} `,
|
||||
torrentId: movie.id,
|
||||
infoHash: torrent.hash.trim().toLowerCase(),
|
||||
torrentLink: torrent.url,
|
||||
seeders: torrent.seeds,
|
||||
size: torrent.size_bytes,
|
||||
uploadDate: new Date(torrent.date_uploaded_unix * 1000),
|
||||
imdbId: movie.imdb_code
|
||||
}));
|
||||
}
|
||||
|
||||
function formatType(type) {
|
||||
if (type === 'web') {
|
||||
return 'WEBRip';
|
||||
}
|
||||
if (type === 'bluray') {
|
||||
return 'BluRay';
|
||||
}
|
||||
return type.toUpperCase();
|
||||
}
|
||||
|
||||
module.exports = { torrent, search, browse };
|
||||
73
scraper/scrapers/yts/yts_scraper.js
Normal file
73
scraper/scrapers/yts/yts_scraper.js
Normal file
@@ -0,0 +1,73 @@
|
||||
const moment = require('moment');
|
||||
const Bottleneck = require('bottleneck');
|
||||
const yts = require('./yts_api');
|
||||
const { Type } = require('../../lib/types');
|
||||
const repository = require('../../lib/repository');
|
||||
const { createTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders } = require('../../lib/torrentEntries');
|
||||
|
||||
const NAME = 'YTS';
|
||||
const UNTIL_PAGE = 2;
|
||||
|
||||
const limiter = new Bottleneck({ maxConcurrent: 20 });
|
||||
|
||||
async function scrape() {
|
||||
const scrapeStart = moment();
|
||||
const lastScrape = await repository.getProvider({ name: NAME });
|
||||
console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
|
||||
|
||||
return scrapeLatestTorrents()
|
||||
.then(() => {
|
||||
lastScrape.lastScraped = scrapeStart;
|
||||
return lastScrape.save();
|
||||
})
|
||||
.then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
|
||||
}
|
||||
|
||||
async function updateSeeders(torrent) {
|
||||
return limiter.schedule(() => yts.torrent(torrent.torrentId)
|
||||
.then(records => records.map(record => ({ ...record, provider: NAME })))
|
||||
.then(records => Promise.all(records.map(record => updateTorrentSeeders(record)))));
|
||||
}
|
||||
|
||||
async function scrapeLatestTorrents() {
|
||||
return scrapeLatestTorrentsForCategory();
|
||||
}
|
||||
|
||||
async function scrapeLatestTorrentsForCategory(page = 1) {
|
||||
console.log(`Scrapping ${NAME} page ${page}`);
|
||||
return yts.browse(({ page }))
|
||||
.catch(error => {
|
||||
console.warn(`Failed ${NAME} scrapping for [${page}] due: `, error);
|
||||
return Promise.resolve([]);
|
||||
})
|
||||
.then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)))))
|
||||
.then(resolved => resolved.length > 0 && page < UNTIL_PAGE
|
||||
? scrapeLatestTorrentsForCategory(page + 1)
|
||||
: Promise.resolve());
|
||||
}
|
||||
|
||||
async function processTorrentRecord(record) {
|
||||
if (await getStoredTorrentEntry(record)) {
|
||||
return updateTorrentSeeders(record);
|
||||
}
|
||||
|
||||
if (!record || !record.size) {
|
||||
return Promise.resolve('Invalid torrent record');
|
||||
}
|
||||
|
||||
const torrent = {
|
||||
infoHash: record.infoHash,
|
||||
provider: NAME,
|
||||
torrentId: record.torrentId,
|
||||
title: record.name.replace(/\t|\s+/g, ' ').trim(),
|
||||
type: Type.MOVIE,
|
||||
size: record.size,
|
||||
seeders: record.seeders,
|
||||
uploadDate: record.uploadDate,
|
||||
imdbId: record.imdbId,
|
||||
};
|
||||
|
||||
return createTorrentEntry(torrent).then(() => torrent);
|
||||
}
|
||||
|
||||
module.exports = { scrape, updateSeeders, NAME };
|
||||
Reference in New Issue
Block a user