remove scraper module

This commit is contained in:
TheBeastLT
2022-06-16 17:44:38 +03:00
parent a253e8e0b0
commit 554c07d636
69 changed files with 0 additions and 61771 deletions

View File

@@ -1,127 +0,0 @@
const axios = require('axios');
const cheerio = require("cheerio");
const decode = require("magnet-uri");
const { getRandomUserAgent } = require("../../lib/requestHelper");
// Default HTTP timeout (ms) applied when config.timeout is not supplied.
const defaultTimeout = 10000;
// Hard cap on how many search result pages a single search() may walk.
const maxSearchPage = 50;
// Root URL of the scraped site.
const baseUrl = 'https://darkmahou.com';
// Category slugs as used in the site's /category/<slug>/ listing URLs.
const Categories = {
  MOVIE: 'movie',
  ANIME: 'tv', // NOTE(review): the site appears to use the 'tv' slug for anime series — confirm
  OVA: 'ova'
};
/**
 * Fetches a torrent detail page and parses every magnet on it.
 * Accepts either a full detail-page URL (slug is path segment 3) or a bare
 * slug (as stored back into entries via `torrentId: slug` below).
 * @param {string} torrentId - detail page URL or slug.
 * @param {object} [config] - request options ({ timeout, ... }).
 * @param {number} [retries=2] - remaining attempts before giving up.
 * @returns {Promise<Array<object>>} parsed torrent records, each tagged with the slug.
 */
function torrent(torrentId, config = {}, retries = 2) {
  if (!torrentId || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentId} query`));
  }
  // Original always took split("/")[3], which is undefined for a bare slug.
  const slug = torrentId.includes('/') ? torrentId.split('/')[3] : torrentId;
  return singleRequest(`${baseUrl}/${slug}`, config)
      .then((body) => parseTorrentPage(body))
      .then((torrent) => torrent.map((el) => ({ torrentId: slug, ...el })))
      // Retry with the original id: the old code passed `slug`, whose
      // split("/")[3] is undefined, so every retry failed immediately.
      .catch(() => torrent(torrentId, config, retries - 1));
}
/**
 * Searches the site for torrents matching a keyword, following result pages
 * while they come back full (40 rows) up to `extendToPage`.
 * @param {string} keyword - search term (URI-encoded before the request).
 * @param {object} [config] - { page, extendToPage, timeout }.
 * @param {number} [retries=2] - remaining attempts before giving up.
 * @returns {Promise<Array<{name: string, torrentId: string}>>}
 */
function search(keyword, config = {}, retries = 2) {
  if (!keyword || retries === 0) {
    return Promise.reject(new Error(`Failed ${keyword} search`));
  }
  const page = config.page || 1;
  const extendToPage = Math.min(maxSearchPage, config.extendToPage || 1);
  // Encode the keyword so spaces and special characters survive the query
  // string (the original interpolated it raw).
  return singleRequest(`${baseUrl}/page/${page}/?s=${encodeURIComponent(keyword)}`, config)
      .then((body) => parseTableBody(body))
      .then((torrents) =>
          torrents.length === 40 && page < extendToPage
              // A full page (40 rows) suggests more results on the next page.
              ? search(keyword, { ...config, page: page + 1 })
                  .catch(() => [])
                  .then((nextTorrents) => torrents.concat(nextTorrents))
              : torrents)
      .catch(() => search(keyword, config, retries - 1));
}
/**
 * Lists torrents from a category page, or the site front page when no
 * category is given.
 * @param {object} [config] - { category, page, timeout }.
 * @param {number} [retries=2] - remaining attempts before giving up.
 * @returns {Promise<Array<{name: string, torrentId: string}>>}
 */
function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const page = config.page || 1;
  const { category } = config;
  const requestUrl = category
      ? `${baseUrl}/category/${category}/page/${page}/`
      : `${baseUrl}/page/${page}/`;
  return singleRequest(requestUrl, config)
      .then(parseTableBody)
      .catch(() => browse(config, retries - 1));
}
/**
 * Performs one GET request with a randomized User-Agent and rejects on an
 * empty body or on known error pages served with a 200 status.
 * @param {string} requestUrl - absolute URL to fetch.
 * @param {object} [config] - { timeout } overriding the default.
 * @returns {Promise<string>} raw response body.
 */
function singleRequest(requestUrl, config = {}) {
  const options = {
    headers: { 'User-Agent': getRandomUserAgent() },
    timeout: config.timeout || defaultTimeout,
  };
  return axios.get(requestUrl, options).then((response) => {
    const body = response.data;
    if (!body) {
      throw new Error(`No body: ${requestUrl}`);
    }
    // Some failures come back as 200 pages containing these markers.
    const errorMarkers = ["502: Bad gateway", "403 Forbidden"];
    if (errorMarkers.some((marker) => body.includes(marker))) {
      throw new Error(`Invalid body contents: ${requestUrl}`);
    }
    return body;
  });
}
/**
 * Parses a listing/search results page into lightweight torrent stubs.
 * @param {string} body - raw HTML of a listing page.
 * @returns {Promise<Array<{name: string, torrentId: string}>>} one entry per
 *     `article.bs` row; `torrentId` is the row's detail-page href.
 */
function parseTableBody(body) {
  const $ = cheerio.load(body);
  // The original wrapped this synchronous code in `new Promise` and called
  // reject() without returning, so execution continued past the failure.
  if (!$) {
    return Promise.reject(new Error("Failed loading body"));
  }
  const torrents = [];
  $("article.bs").each((i, element) => {
    const row = $(element);
    torrents.push({
      name: row.find("span.ntitle").text(),
      torrentId: row.find("div > a").attr("href"),
    });
  });
  return Promise.resolve(torrents);
}
/**
 * Parses a torrent detail page into one record per magnet link found.
 * @param {string} body - raw HTML of a detail page.
 * @returns {Promise<Array<object>>} records with title, originalName, year,
 *     infoHash, magnetLink, category and uploadDate.
 */
function parseTorrentPage(body) {
  // The original used `new Promise(async (resolve, reject) => ...)`: any
  // throw inside the async executor was swallowed and the promise never
  // settled. Plain synchronous code with Promise.resolve/reject fixes that;
  // it also had a reject() without `return`.
  const $ = cheerio.load(body);
  if (!$) {
    return Promise.reject(new Error("Failed loading body"));
  }
  const magnets = [];
  $(`a[href^="magnet"]`).each((i, section) => {
    magnets.push($(section).attr("href"));
  });
  const details = $('div.infox');
  const torrents = magnets.map((magnetLink) => {
    const decoded = decode(magnetLink); // decode once, not once per field
    // Guard optional page elements instead of throwing on missing nodes.
    const yearLabel = details.find('b:contains(\'Lançado:\')')[0];
    const year = (yearLabel && yearLabel.nextSibling && yearLabel.nextSibling.nodeValue) || '';
    const categoryHref = details.find('b:contains(\'Tipo:\')').next().attr('href');
    return {
      title: decoded.name,
      originalName: details.find('h1.entry-title').text(),
      year: year,
      infoHash: decoded.infoHash,
      magnetLink: magnetLink,
      category: categoryHref ? categoryHref.split('/')[4] : undefined,
      uploadDate: new Date($("time[itemprop=dateModified]").attr("datetime")),
    };
  });
  return Promise.resolve(torrents.filter((x) => x));
}
// Public API: page fetchers plus the category constants used by the scraper.
module.exports = { torrent, search, browse, Categories };

View File

@@ -1,108 +0,0 @@
const moment = require("moment");
const Bottleneck = require("bottleneck");
const darkmahou = require("./darkmahou_api");
const { Type } = require("../../lib/types");
const repository = require("../../lib/repository");
const Promises = require("../../lib/promises");
const { createTorrentEntry, checkAndUpdateTorrent } = require("../../lib/torrentEntries");
const { updateCurrentSeeders, updateTorrentSize } = require("../../lib/torrent");
const { getKitsuId } = require("../../lib/metadata");
// Provider name stored in the repository and used in log prefixes.
const NAME = "DarkMahou";
// Fallback last page to scrape per category (see untilPage()).
const UNTIL_PAGE = 5;
// At most 5 site requests in flight at any time.
const limiter = new Bottleneck({ maxConcurrent: 5 });
/**
 * Runs one full scrape pass and, on success, persists the scrape start time
 * as the provider's lastScraped timestamp.
 * @returns {Promise<void>}
 */
async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
  await scrapeLatestTorrents();
  lastScrape.lastScraped = scrapeStart;
  await lastScrape.save();
  console.log(`[${moment()}] finished ${NAME} scrape`);
}
/**
 * Re-fetches a stored torrent's detail page (rate-limited) so its seeder
 * counts can be refreshed from the parsed records.
 * @param {object} torrent - stored torrent with a `torrentId` slug.
 * @returns {Promise<Array<object>>} freshly parsed records for the torrent.
 */
async function updateSeeders(torrent) {
  const { torrentId } = torrent;
  return limiter.schedule(() => darkmahou.torrent(torrentId));
}
/**
 * Scrapes the latest torrents for every supported category, one category at
 * a time, and flattens the per-category results into a single array.
 * @returns {Promise<Array>}
 */
async function scrapeLatestTorrents() {
  const allowedCategories = [
    darkmahou.Categories.MOVIE,
    darkmahou.Categories.ANIME,
    darkmahou.Categories.OVA,
  ];
  return Promises.sequence(allowedCategories
      .map((category) => () => scrapeLatestTorrentsForCategory(category)))
      // One-level flatten; replaces the manual reduce/concat idiom.
      .then((entries) => entries.flat());
}
/**
 * Scrapes one listing page of a category, processes each entry through the
 * rate limiter, then recurses to the next page until a page yields nothing
 * or the per-category page limit is reached.
 * @param {string} category - one of darkmahou.Categories.
 * @param {number} [page=1] - 1-based listing page number.
 * @returns {Promise<void>}
 */
async function scrapeLatestTorrentsForCategory(category, page = 1) {
  // Fixed log typos: "Scrapping"/"scrapping" -> "Scraping"/"scraping".
  console.log(`Scraping ${NAME} ${category} category page ${page}`);
  return darkmahou
      .browse({ category, page })
      .catch((error) => {
        // Listing failures are tolerated: log and treat the page as empty.
        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due: `, error);
        return Promise.resolve([]);
      })
      .then((torrents) => Promise.all(torrents.map((torrent) => limiter.schedule(() => processEntry(torrent)))))
      .then((resolved) => resolved.length > 0 && page < untilPage(category)
          ? scrapeLatestTorrentsForCategory(category, page + 1)
          : Promise.resolve());
}
/**
 * Fetches a listing entry's detail page and stores each parsed record.
 * Best-effort: failures resolve to undefined so one bad entry does not
 * abort the page, but they are now logged instead of silently swallowed.
 * @param {{torrentId: string}} entry - stub from a listing page.
 * @returns {Promise<Array|undefined>}
 */
async function processEntry(entry) {
  return darkmahou.torrent(entry.torrentId)
      .then((records) => Promises.sequence(records.map((record) => () => processTorrentRecord(record))))
      .catch((error) => {
        console.warn(`Failed ${NAME} entry [${entry.torrentId}] due: `, error);
        return undefined;
      });
}
/**
 * Persists a single parsed torrent record: skips it when an existing entry
 * was updated, otherwise backfills size/seeders/kitsu metadata and creates
 * a new entry.
 * @param {object} foundTorrent - record produced by the darkmahou API.
 * @returns {Promise<object>} the existing record or the created entry.
 */
async function processTorrentRecord(foundTorrent) {
  const existing = await checkAndUpdateTorrent({ provider: NAME, ...foundTorrent });
  if (existing) {
    return foundTorrent;
  }
  if (!foundTorrent.size) {
    await updateTorrentSize(foundTorrent);
  }
  if (!Number.isInteger(foundTorrent.seeders)) {
    await updateCurrentSeeders(foundTorrent);
  }
  if (!foundTorrent.imdbId && !foundTorrent.kitsuId) {
    // Best-effort kitsu lookup from the page's original title/year.
    foundTorrent.kitsuId = await getKitsuId({ title: foundTorrent.originalName, year: foundTorrent.year })
        .catch(() => undefined);
  }
  const { infoHash, torrentId, title, imdbId, kitsuId, uploadDate, seeders, size, files, languages } = foundTorrent;
  return createTorrentEntry({
    infoHash,
    provider: NAME,
    torrentId,
    title,
    type: Type.ANIME,
    imdbId,
    kitsuId,
    uploadDate,
    seeders,
    size,
    files,
    languages,
  });
}
/**
 * Returns how many listing pages to scrape for a category.
 * @param {string} category - one of darkmahou.Categories.
 * @returns {number} last page (inclusive) to visit.
 */
function untilPage(category) {
  switch (category) {
    case darkmahou.Categories.ANIME:
      return 5;
    case darkmahou.Categories.OVA:
      return 4;
    default:
      return UNTIL_PAGE;
  }
}
// Entry points consumed by the scraper scheduler.
module.exports = { scrape, updateSeeders, NAME };