format darkmahou provider

TheBeastLT
2021-09-14 15:14:11 +02:00
committed by TheBeastLT
parent 50882c30a9
commit c17627e583
2 changed files with 93 additions and 118 deletions

View File

@@ -8,13 +8,11 @@ const { getRandomUserAgent } = require("../../lib/requestHelper");
 const defaultTimeout = 10000;
 const maxSearchPage = 50;
-const limiter = new Bottleneck({ maxConcurrent: 10 });
-const defaultProxies = ["https://darkmahou.com"];
+const defaultProxies = ['https://darkmahou.com'];
 const Categories = {
-  MOVIE: "movie",
-  ANIME: "tv",
+  MOVIE: 'movie',
+  ANIME: 'tv',
   OVA: 'ova'
 };
@@ -25,11 +23,11 @@ function torrent(torrentId, config = {}, retries = 2) {
   const proxyList = config.proxyList || defaultProxies;
   const slug = torrentId.split("/")[3];
   return Promises.first(
     proxyList.map((proxyUrl) => singleRequest(`${proxyUrl}/${slug}`, config))
   )
       .then((body) => parseTorrentPage(body))
       .then((torrent) => torrent.map((el) => ({ torrentId: slug, ...el })))
       .catch((err) => torrent(slug, config, retries - 1));
 }
 function search(keyword, config = {}, retries = 2) {
@@ -42,17 +40,17 @@ function search(keyword, config = {}, retries = 2) {
   const requestUrl = (proxyUrl) => `${proxyUrl}/page/${page}/?s=${keyword}`;
   return Promises.first(
     proxyList.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config))
   )
       .then((body) => parseTableBody(body))
       .then((torrents) =>
         torrents.length === 40 && page < extendToPage
             ? search(keyword, { ...config, page: page + 1 })
                 .catch(() => [])
                 .then((nextTorrents) => torrents.concat(nextTorrents))
             : torrents
       )
       .catch((err) => search(keyword, config, retries - 1));
 }
 function browse(config = {}, retries = 2) {
@@ -63,15 +61,15 @@ function browse(config = {}, retries = 2) {
   const page = config.page || 1;
   const category = config.category;
   const requestUrl = (proxyUrl) =>
       category
           ? `${proxyUrl}/category/${category}/page/${page}/`
           : `${proxyUrl}/page/${page}/`;
   return Promises.first(
     proxyList.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config))
   )
       .then((body) => parseTableBody(body))
       .catch((err) => browse(config, retries - 1));
 }
 function singleRequest(requestUrl, config = {}) {
@@ -87,8 +85,8 @@ function singleRequest(requestUrl, config = {}) {
   if (!body) {
     throw new Error(`No body: ${requestUrl}`);
   } else if (
       body.includes("502: Bad gateway") ||
       body.includes("403 Forbidden")
   ) {
     throw new Error(`Invalid body contents: ${requestUrl}`);
   }
@@ -118,7 +116,7 @@ function parseTableBody(body) {
 }
 function parseTorrentPage(body) {
-  return new Promise(async(resolve, reject) => {
+  return new Promise(async (resolve, reject) => {
     const $ = cheerio.load(body);
     if (!$) {
@@ -133,8 +131,8 @@ function parseTorrentPage(body) {
     const torrent = magnets.map((magnetLink) => {
       return {
         title: decode(magnetLink).name,
-        original_name: details.find('h1.entry-title').text(),
+        originalName: details.find('h1.entry-title').text(),
         year: details.find('b:contains(\'Lançado:\')')[0].nextSibling.nodeValue || '',
         infoHash: decode(magnetLink).infoHash,
         magnetLink: magnetLink,
         category: details.find('b:contains(\'Tipo:\')').next().attr('href').split('/')[4],

View File

@@ -1,16 +1,15 @@
 const moment = require("moment");
 const Bottleneck = require("bottleneck");
-const leetx = require("./darkmahou_api");
+const darkmahou = require("./darkmahou_api");
 const { Type } = require("../../lib/types");
 const repository = require("../../lib/repository");
 const Promises = require("../../lib/promises");
 const { createTorrentEntry, checkAndUpdateTorrent } = require("../../lib/torrentEntries");
 const { updateCurrentSeeders, updateTorrentSize } = require("../../lib/torrent");
-const { getImdbId } = require("../../lib/metadata");
+const { getKitsuId } = require("../../lib/metadata");
 const NAME = "DarkMahou";
 const UNTIL_PAGE = 5;
-const TYPE_MAPPING = typeMapping();
 const limiter = new Bottleneck({ maxConcurrent: 5 });
@@ -20,109 +19,87 @@ async function scrape() {
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);
   return scrapeLatestTorrents()
       .then(() => {
         lastScrape.lastScraped = scrapeStart;
         return lastScrape.save();
       })
       .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }
 async function updateSeeders(torrent) {
-  return limiter.schedule(() => leetx.torrent(torrent.torrentId));
+  return limiter.schedule(() => darkmahou.torrent(torrent.torrentId));
 }
 async function scrapeLatestTorrents() {
   const allowedCategories = [
-    leetx.Categories.MOVIE,
-    leetx.Categories.ANIME,
-    leetx.Categories.OVA
+    darkmahou.Categories.MOVIE,
+    darkmahou.Categories.ANIME,
+    darkmahou.Categories.OVA
   ];
-  return Promises.sequence(
-    allowedCategories.map(
-      (category) => () => scrapeLatestTorrentsForCategory(category)
-    )
-  ).then((entries) => entries.reduce((a, b) => a.concat(b), []));
+  return Promises.sequence(allowedCategories
+      .map((category) => () => scrapeLatestTorrentsForCategory(category)))
+      .then((entries) => entries.reduce((a, b) => a.concat(b), []));
 }
 async function scrapeLatestTorrentsForCategory(category, page = 1) {
-  console.log({Scraper: `Scrapping ${NAME} ${category} category page ${page}`});
-  return leetx
+  console.log(`Scrapping ${NAME} ${category} category page ${page}`);
+  return darkmahou
       .browse({ category, page })
       .catch((error) => {
-        console.warn(
-          `Failed ${NAME} scrapping for [${page}] ${category} due: `,
-          error
-        );
-        return Promise.resolve([]);
-      })
-      .then((torrents) => Promise.all(torrents.map((torrent) => limiter.schedule(() => processTorrentRecord(torrent)))))
-      .then((resolved) => resolved.length > 0 && page < untilPage(category) ? scrapeLatestTorrentsForCategory(category, page + 1) : Promise.resolve());
+        console.warn(`Failed ${NAME} scrapping for [${page}] ${category} due: `, error);
+        return Promise.resolve([]);
+      })
+      .then((torrents) => Promise.all(torrents.map((torrent) => limiter.schedule(() => processEntry(torrent)))))
+      .then((resolved) => resolved.length > 0 && page < untilPage(category)
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve());
 }
-async function processTorrentRecord(record) {
-  if (await checkAndUpdateTorrent({ provider: NAME, ...record })) {
-    return record;
-  }
-  const torrentEntrys = await leetx
-      .torrent(record.torrentId)
-      .catch(() => undefined);
-  if (torrentEntrys === undefined) {
-    return Promise.resolve([])
-  }
-  return Promise.allSettled(
-    torrentEntrys.map(async (torrentFound) => {
-      if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
-        return Promise.resolve("Invalid torrent record");
-      }
-      if (isNaN(torrentFound.uploadDate)) {
-        console.warn(
-          `Incorrect upload date for [${torrentFound.infoHash}] ${torrentFound.name}`
-        );
-        return;
-      }
-      if (await checkAndUpdateTorrent(torrentFound)) {
-        return torrentFound;
-      }
-      if (!torrentFound.size) {
-        await updateTorrentSize(torrentFound)
-            .catch((err) => Promise.resolve(err))
-      }
-      if (!torrentFound.seeders) {
-        await updateCurrentSeeders(torrentFound)
-            .then(response => response.seeders === 0 ? delete response.seeders : response)
-      }
-      if (!torrentFound.imdbId) {
-        torrentFound.imdbId = await getImdbId(torrentFound.original_name, torrentFound.year, TYPE_MAPPING[torrentFound.category])
-      }
-      const torrent = {
-        infoHash: torrentFound.infoHash,
-        provider: NAME,
-        torrentId: torrentFound.torrentId,
-        title: torrentFound.title.replace(/\t|\s+/g, " ").trim(),
-        type: Type.ANIME,
-        imdbId: torrentFound.imdbId,
-        uploadDate: torrentFound.uploadDate,
-        seeders: torrentFound.seeders,
-      };
-      return createTorrentEntry(torrent);
-    })
-  );
+async function processEntry(entry) {
+  return darkmahou.torrent(entry.torrentId)
+      .then(records => Promises.sequence(records.map(record => () => processTorrentRecord(record))))
+      .catch(() => undefined);
 }
-function typeMapping() {
-  const mapping = {};
-  mapping[leetx.Categories.MOVIE] = Type.MOVIE;
-  mapping[leetx.Categories.ANIME] = Type.SERIES;
-  mapping[leetx.Categories.OVA] = Type.ANIME
-  return mapping;
+async function processTorrentRecord(foundTorrent) {
+  if (await checkAndUpdateTorrent({ provider: NAME, ...foundTorrent })) {
+    return foundTorrent;
+  }
+  if (!foundTorrent.size) {
+    await updateTorrentSize(foundTorrent);
+  }
+  if (!Number.isInteger(foundTorrent.seeders)) {
+    await updateCurrentSeeders(foundTorrent);
+  }
+  if (!foundTorrent.imdbId && !foundTorrent.kitsuId) {
+    const info = { title: foundTorrent.originalName, year: foundTorrent.year };
+    foundTorrent.kitsuId = await getKitsuId(info).catch(() => undefined);
+  }
+  const torrent = {
+    infoHash: foundTorrent.infoHash,
+    provider: NAME,
+    torrentId: foundTorrent.torrentId,
+    title: foundTorrent.title,
+    type: Type.ANIME,
+    imdbId: foundTorrent.imdbId,
+    kitsuId: foundTorrent.kitsuId,
+    uploadDate: foundTorrent.uploadDate,
+    seeders: foundTorrent.seeders,
+    size: foundTorrent.size,
+    files: foundTorrent.files,
+    languages: foundTorrent.languages
+  };
+  return createTorrentEntry(torrent);
 }
 function untilPage(category) {
-  if (leetx.Categories.ANIME === category) {
+  if (darkmahou.Categories.ANIME === category) {
     return 5;
   }
-  if (leetx.Categories.OVA === category) {
+  if (darkmahou.Categories.OVA === category) {
     return 4;
   }
   return UNTIL_PAGE;