remove scraper module

This commit is contained in:
TheBeastLT
2022-06-16 17:44:38 +03:00
parent a253e8e0b0
commit 554c07d636
69 changed files with 0 additions and 61771 deletions

View File

@@ -1,98 +0,0 @@
const axios = require('axios');
const Promises = require('../../lib/promises');
const { getRandomUserAgent } = require('./../../lib/requestHelper');
// Mirror hosts queried in parallel; the first successful response wins.
const defaultProxies = [
'https://yts.mx'
];
// Per-request timeout in milliseconds (overridable via config.timeout).
const defaultTimeout = 30000;
// Page size for list_movies queries; also used to derive the total page count.
const limit = 50;
/**
 * Fetches details for a single movie id, racing all proxies and retrying
 * (recursively) on failure until the retry budget is exhausted.
 * @param {string|number} torrentId - YTS movie id.
 * @param {object} [config] - request settings forwarded to singleRequest.
 * @param {number} [retries] - remaining attempts; 0 rejects immediately.
 * @returns {Promise<Array>} parsed torrent entries for the movie.
 */
function torrent(torrentId, config = {}, retries = 2) {
  if (!torrentId || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentId} search`));
  }
  const requests = defaultProxies.map((proxyUrl) =>
      singleRequest(`${proxyUrl}/api/v2/movie_details.json?movie_id=${torrentId}`, config));
  return Promises.first(requests)
      .then((body) => parseResults(body))
      .catch(() => torrent(torrentId, config, retries - 1));
}
/**
 * Searches movies by free-text query, racing all proxies and retrying on
 * failure until the retry budget is exhausted.
 * @param {string} query - search term; empty/undefined rejects immediately.
 * @param {object} [config] - request settings forwarded to singleRequest.
 * @param {number} [retries] - remaining attempts; 0 rejects immediately.
 * @returns {Promise<Array>} parsed torrent entries matching the query.
 */
function search(query, config = {}, retries = 2) {
  if (!query || retries === 0) {
    return Promise.reject(new Error(`Failed ${query} search`));
  }
  const toRequest = (proxyUrl) =>
      singleRequest(`${proxyUrl}/api/v2/list_movies.json?limit=${limit}&query_term=${query}`, config);
  return Promises.first(defaultProxies.map(toRequest))
      .then((results) => parseResults(results))
      .catch(() => search(query, config, retries - 1));
}
/**
 * Lists one page of movies (defaults to page 1), racing all proxies and
 * retrying on failure until the retry budget is exhausted.
 * @param {object} [config] - request settings; config.page selects the page.
 * @param {number} [retries] - remaining attempts; 0 rejects immediately.
 * @returns {Promise<Array>} parsed torrent entries for the page.
 */
function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const page = config.page || 1; // any falsy page (0/undefined) falls back to 1
  const toRequest = (proxyUrl) =>
      singleRequest(`${proxyUrl}/api/v2/list_movies.json?limit=${limit}&page=${page}`, config);
  return Promises.first(defaultProxies.map(toRequest))
      .then((results) => parseResults(results))
      .catch(() => browse(config, retries - 1));
}
/**
 * Derives the total number of listing pages from the site's movie count.
 * Bug fix: uses Math.ceil instead of Math.round — a trailing partial page
 * with fewer than limit/2 movies was previously rounded away, so the last
 * page of results was never scraped.
 * @returns {Promise<number>} total page count (0 when the count is missing).
 */
function maxPage() {
  return Promises.first(defaultProxies
      .map((proxyUrl) => singleRequest(`${proxyUrl}/api/v2/list_movies.json?limit=${limit}`)))
      .then((results) => Math.ceil((results?.data?.movie_count || 0) / limit));
}
/**
 * Performs one GET request with a random User-Agent and a timeout.
 * Bug fix: rejects with a real Error (was a bare string), so callers get a
 * stack trace and `.message` instead of an unstructured value.
 * @param {string} requestUrl - absolute URL to fetch.
 * @param {object} [config] - optional settings; config.timeout overrides defaultTimeout.
 * @returns {Promise<object>} the response body when present.
 */
function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;
  const options = { headers: { 'User-Agent': getRandomUserAgent() }, timeout };
  return axios.get(requestUrl, options)
      .then((response) => {
        if (!response.data) {
          return Promise.reject(new Error(`No body: ${requestUrl}`));
        }
        return response.data;
      });
}
/**
 * Validates an API response and flattens it into torrent entries.
 * Accepts either a single movie (data.movie) or a list (data.movies).
 * Bug fix: rejects with a real Error (was a bare string); also replaces the
 * manual reduce/concat flatten with Array.prototype.flatMap.
 * @param {object} results - raw movie_details/list_movies response body.
 * @returns {Array|Promise<never>} flattened torrent entries, or a rejected
 *   promise when the response shape is not recognised.
 */
function parseResults(results) {
  const data = results?.data;
  if (!data || (!data.movie && !Array.isArray(data.movies))) {
    console.log('Incorrect results: ', results);
    return Promise.reject(new Error('Incorrect results'));
  }
  return (data.movies || [data.movie])
      .filter((movie) => Array.isArray(movie.torrents))
      .flatMap((movie) => parseMovie(movie));
}
/**
 * Maps one movie record and its torrents array to flat torrent entries.
 * @param {object} movie - YTS movie record containing a `torrents` array.
 * @returns {Array<object>} one entry per torrent of the movie.
 */
function parseMovie(movie) {
  const toEntry = (torrent) => {
    const infoHash = torrent.hash.trim().toLowerCase();
    return {
      name: `${movie.title} ${movie.year} ${torrent.quality} ${formatType(torrent.type)} `,
      torrentId: `${movie.id}-${infoHash}`,
      infoHash,
      torrentLink: torrent.url,
      seeders: torrent.seeds,
      size: torrent.size_bytes,
      uploadDate: new Date(torrent.date_uploaded_unix * 1000),
      imdbId: movie.imdb_code
    };
  };
  return movie.torrents.map(toEntry);
}
/**
 * Normalises a YTS torrent type into a conventional release-source tag:
 * 'web' -> 'WEBRip', 'bluray' -> 'BluRay', anything else is upper-cased.
 * @param {string} type - raw `type` field from the API.
 * @returns {string} display form of the type.
 */
function formatType(type) {
  switch (type) {
    case 'web':
      return 'WEBRip';
    case 'bluray':
      return 'BluRay';
    default:
      return type.toUpperCase();
  }
}
// Public API of the YTS client: detail lookup, search, paged browse, page count.
module.exports = { torrent, search, browse, maxPage };

View File

@@ -1,15 +0,0 @@
const moment = require("moment");
const yts = require('./yts_api');
const scraper = require('./yts_scraper')
/**
 * Runs a full YTS scrape: resolves the total page count, then delegates to
 * the scraper, logging start and finish timestamps.
 * @returns {Promise<void>}
 */
async function scrape() {
  const scrapeStart = moment();
  console.log(`[${scrapeStart}] starting ${scraper.NAME} full scrape...`);
  const maxPage = await yts.maxPage();
  await scraper.scrape(maxPage);
  console.log(`[${moment()}] finished ${scraper.NAME} full scrape`);
}
// Expose the full-scrape entry point and provider name for the scheduler.
module.exports = { scrape, NAME: scraper.NAME };

View File

@@ -1,67 +0,0 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const yts = require('./yts_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const { createTorrentEntry, checkAndUpdateTorrent } = require('../../lib/torrentEntries');
// Provider display name used in logs and persisted torrent entries.
const NAME = 'YTS';
// Default number of listing pages to walk when no maxPage is supplied.
const UNTIL_PAGE = 10;
// Caps concurrent record processing and seeder lookups.
const limiter = new Bottleneck({ maxConcurrent: 10 });
/**
 * Scrapes YTS up to maxPage pages, then records the scrape start time on the
 * provider row so later runs know when this one began.
 * @param {number} maxPage - inclusive last page index to visit.
 * @returns {Promise<void>}
 */
async function scrape(maxPage) {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
  await scrapeLatestTorrentsForCategory(maxPage);
  lastScrape.lastScraped = scrapeStart;
  await lastScrape.save();
  console.log(`[${moment()}] finished ${NAME} scrape`);
}
/**
 * Re-fetches a torrent's movie details through the rate limiter so seeder
 * counts can be refreshed.
 * @param {object} torrent - stored torrent entry carrying a torrentId.
 * @returns {Promise<Array>} freshly parsed entries for the torrent's movie.
 */
async function updateSeeders(torrent) {
  const fetchDetails = () => yts.torrent(torrent.torrentId);
  return limiter.schedule(fetchDetails);
}
/**
 * Walks listing pages from `page` until `maxPage` is reached or a page yields
 * no records, processing every torrent through the rate limiter.
 * A failed page fetch is logged and treated as empty, which also ends the walk.
 * Bug fix: corrects the "Scrapping"/"scrapping" typos in the log messages.
 * @param {number} [maxPage] - inclusive last page to visit.
 * @param {number} [page] - page to start from.
 * @returns {Promise<void>}
 */
async function scrapeLatestTorrentsForCategory(maxPage = UNTIL_PAGE, page = 1) {
  console.log(`Scraping ${NAME} page ${page}`);
  const torrents = await yts.browse({ page })
      .catch((error) => {
        console.warn(`Failed ${NAME} scraping for [${page}] due: `, error);
        return []; // best-effort: an empty page ends the walk below
      });
  const resolved = await Promise.all(
      torrents.map((torrent) => limiter.schedule(() => processTorrentRecord(torrent))));
  if (resolved.length > 0 && page < maxPage) {
    return scrapeLatestTorrentsForCategory(maxPage, page + 1);
  }
}
/**
 * Converts a scraped record into a stored torrent entry.
 * Existing torrents are refreshed via checkAndUpdateTorrent; new valid
 * records are persisted with createTorrentEntry.
 * Bug fix: the null/undefined guard now runs BEFORE checkAndUpdateTorrent —
 * the original passed a possibly-null record to it and only checked
 * `!record` afterwards.
 * @param {object} record - parsed YTS entry (see yts_api's parseMovie).
 * @returns {Promise<object|string>} the torrent entry, or an explanation
 *   string when the record is unusable.
 */
async function processTorrentRecord(record) {
  if (!record) {
    return 'Invalid torrent record';
  }
  if (await checkAndUpdateTorrent(record)) {
    return record;
  }
  if (!record.size) {
    return 'Invalid torrent record';
  }
  const torrent = {
    infoHash: record.infoHash,
    provider: NAME,
    torrentId: record.torrentId,
    title: record.name.replace(/\t|\s+/g, ' ').trim(),
    type: Type.MOVIE,
    size: record.size,
    seeders: record.seeders,
    uploadDate: record.uploadDate,
    imdbId: record.imdbId,
  };
  return createTorrentEntry(torrent).then(() => torrent);
}
// Expose scrape entry points and provider name for the scraper registry.
module.exports = { scrape, updateSeeders, NAME };