format animestorrent provider
@@ -8,12 +8,12 @@ const defaultTimeout = 10000;
 const maxSearchPage = 50;

 const defaultProxies = [
-  "https://animestorrent.com"
+  'https://animestorrent.com'
 ];

 const Categories = {
-  MOVIE: "filme",
-  ANIME: "tv",
+  MOVIE: 'filme',
+  ANIME: 'tv',
   OVA: 'ova'
 };

@@ -23,12 +23,10 @@ function torrent(torrentId, config = {}, retries = 2) {
   }
   const proxyList = config.proxyList || defaultProxies;
   const slug = torrentId.split("/")[3];
-  return Promises.first(
-    proxyList.map((proxyUrl) => singleRequest(`${proxyUrl}/${slug}`, config))
-  )
-    .then((body) => parseTorrentPage(body))
-    .then((torrent) => torrent.map((el) => ({ torrentId: slug, ...el })))
-    .catch((err) => torrent(slug, config, retries - 1));
+  return Promises.first(proxyList.map((proxyUrl) => singleRequest(`${proxyUrl}/${slug}`, config)))
+      .then((body) => parseTorrentPage(body))
+      .then((torrent) => torrent.map((el) => ({ torrentId: slug, ...el })))
+      .catch((err) => torrent(slug, config, retries - 1));
 }

 function search(keyword, config = {}, retries = 2) {
@@ -40,18 +38,15 @@ function search(keyword, config = {}, retries = 2) {
   const extendToPage = Math.min(maxSearchPage, config.extendToPage || 1);
   const requestUrl = (proxyUrl) => `${proxyUrl}/page/${page}/?s=${keyword}`;

-  return Promises.first(
-    proxyList.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config))
-  )
-    .then((body) => parseTableBody(body))
-    .then((torrents) =>
-      torrents.length === 40 && page < extendToPage
-        ? search(keyword, { ...config, page: page + 1 })
-            .catch(() => [])
-            .then((nextTorrents) => torrents.concat(nextTorrents))
-        : torrents
-    )
-    .catch((err) => search(keyword, config, retries - 1));
+  return Promises.first(proxyList.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config)))
+      .then((body) => parseTableBody(body))
+      .then((torrents) =>
+          torrents.length === 40 && page < extendToPage
+              ? search(keyword, { ...config, page: page + 1 })
+                  .catch(() => [])
+                  .then((nextTorrents) => torrents.concat(nextTorrents))
+              : torrents)
+      .catch((err) => search(keyword, config, retries - 1));
 }

 function browse(config = {}, retries = 2) {
@@ -62,15 +57,13 @@ function browse(config = {}, retries = 2) {
   const page = config.page || 1;
   const category = config.category;
   const requestUrl = (proxyUrl) =>
-    category
-      ? `${proxyUrl}/tipo/${category}/page/${page}/`
-      : `${proxyUrl}/page/${page}/`;
+      category
+          ? `${proxyUrl}/tipo/${category}/page/${page}/`
+          : `${proxyUrl}/page/${page}/`;

-  return Promises.first(
-    proxyList.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config))
-  )
-    .then((body) => parseTableBody(body))
-    .catch((err) => browse(config, retries - 1));
+  return Promises.first(proxyList.map((proxyUrl) => singleRequest(requestUrl(proxyUrl), config)))
+      .then((body) => parseTableBody(body))
+      .catch((err) => browse(config, retries - 1));
 }

 function singleRequest(requestUrl, config = {}) {
@@ -86,8 +79,8 @@ function singleRequest(requestUrl, config = {}) {
       if (!body) {
         throw new Error(`No body: ${requestUrl}`);
       } else if (
-        body.includes("502: Bad gateway") ||
-        body.includes("403 Forbidden")
+          body.includes("502: Bad gateway") ||
+          body.includes("403 Forbidden")
       ) {
         throw new Error(`Invalid body contents: ${requestUrl}`);
       }
@@ -117,7 +110,7 @@ function parseTableBody(body) {
 }

 function parseTorrentPage(body) {
-  return new Promise(async(resolve, reject) => {
+  return new Promise(async (resolve, reject) => {
     const $ = cheerio.load(body);

     if (!$) {
@@ -129,18 +122,20 @@ function parseTorrentPage(body) {
       magnets.push(magnet);
     });
     const details = $('div.infox')
-    const torrent = magnets.map((magnetLink) => {
+    const torrents = magnets.map((magnetLink) => {
       return {
         title: decode(magnetLink).name,
-        original_name: details.find('h1.entry-title').text(),
-        year: details.find('b:contains(\'Lançamento:\')')[0] ? details.find('b:contains(\'Lançamento:\')')[0].nextSibling.nodeValue.trim() : '',
+        originalName: details.find('h1.entry-title').text(),
+        year: details.find('b:contains(\'Lançamento:\')')[0]
+            ? details.find('b:contains(\'Lançamento:\')')[0].nextSibling.nodeValue.trim()
+            : '',
         infoHash: decode(magnetLink).infoHash,
         magnetLink: magnetLink,
         category: details.find('b:contains(\'Tipo:\')').next().attr('href').split('/')[4],
         uploadDate: new Date($("time[itemprop=dateModified]").attr("datetime")),
       };
     })
-    resolve(torrent.filter((x) => x));
+    resolve(torrents);
   });
 }
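Every entry point in the API module above (torrent, search, browse) follows the same pattern: fire the same request against every mirror in defaultProxies, take whichever response arrives first, and retry the whole call on failure. The diff does not include lib/promises, so the following is only a minimal sketch of what Promises.first could look like, assuming "first fulfilled wins, reject only when every candidate fails" semantics (roughly Promise.any); the real helper may differ.

// Hypothetical sketch of a Promises.first helper (assumption, not the repo's code).
function first(promises) {
  return new Promise((resolve, reject) => {
    if (!promises.length) {
      // Nothing to race; fail fast instead of hanging forever.
      return reject(new Error('no promises provided'));
    }
    let failures = 0;
    promises.forEach((promise) => Promise.resolve(promise)
        .then(resolve) // first fulfilled value wins; later calls to resolve are no-ops
        .catch((error) => {
          failures += 1;
          if (failures === promises.length) {
            // Only reject once every mirror has failed, with the last error seen.
            reject(error);
          }
        }));
  });
}

module.exports = { first };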
@@ -1,16 +1,15 @@
 const moment = require("moment");
 const Bottleneck = require("bottleneck");
-const leetx = require("./animestorrent_api");
+const animetorrrent = require("./animestorrent_api");
 const { Type } = require("../../lib/types");
 const repository = require("../../lib/repository");
 const Promises = require("../../lib/promises");
 const { createTorrentEntry, checkAndUpdateTorrent } = require("../../lib/torrentEntries");
 const { updateCurrentSeeders, updateTorrentSize } = require("../../lib/torrent");
-const { getImdbId } = require("../../lib/metadata");
+const { getKitsuId } = require("../../lib/metadata");

 const NAME = "AnimesTorrent";
 const UNTIL_PAGE = 5;
-const TYPE_MAPPING = typeMapping();

 const limiter = new Bottleneck({ maxConcurrent: 5 });

@@ -20,109 +19,87 @@ async function scrape() {
   console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

   return scrapeLatestTorrents()
-    .then(() => {
-      lastScrape.lastScraped = scrapeStart;
-      return lastScrape.save();
-    })
-    .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
+      .then(() => {
+        lastScrape.lastScraped = scrapeStart;
+        return lastScrape.save();
+      })
+      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
 }

 async function updateSeeders(torrent) {
-  return limiter.schedule(() => leetx.torrent(torrent.torrentId));
+  return limiter.schedule(() => animetorrrent.torrent(torrent.torrentId));
 }

 async function scrapeLatestTorrents() {
   const allowedCategories = [
-    leetx.Categories.MOVIE,
-    leetx.Categories.ANIME,
-    leetx.Categories.OVA
+    animetorrrent.Categories.MOVIE,
+    animetorrrent.Categories.ANIME,
+    animetorrrent.Categories.OVA
   ];

-  return Promises.sequence(
-    allowedCategories.map(
-      (category) => () => scrapeLatestTorrentsForCategory(category)
-    )
-  ).then((entries) => entries.reduce((a, b) => a.concat(b), []));
+  return Promises.sequence(allowedCategories
+      .map((category) => () => scrapeLatestTorrentsForCategory(category)))
+      .then((entries) => entries.reduce((a, b) => a.concat(b), []));
 }

 async function scrapeLatestTorrentsForCategory(category, page = 1) {
-  console.log({Scraper: `Scrapping ${NAME} ${category} category page ${page}`});
-  return leetx
-    .browse({ category, page })
-    .catch((error) => {
-      console.warn(
-        `Failed ${NAME} scrapping for [${page}] ${category} due: `,
-        error
-      );
-      return Promise.resolve([]);
-    })
-    .then((torrents) => Promise.all(torrents.map((torrent) => limiter.schedule(() => processTorrentRecord(torrent)))))
-    .then((resolved) => resolved.length > 0 && page < untilPage(category) ? scrapeLatestTorrentsForCategory(category, page + 1) : Promise.resolve());
+  console.log(`Scrapping ${NAME} ${category} category page ${page}`);
+  return animetorrrent
+      .browse({ category, page })
+      .catch((error) => {
+        console.warn(`Failed ${NAME} scrapping for [${page}] ${category} due: `, error);
+        return Promise.resolve([]);
+      })
+      .then((torrents) => Promise.all(torrents.map((torrent) => limiter.schedule(() => processEntry(torrent)))))
+      .then((resolved) => resolved.length > 0 && page < untilPage(category)
+          ? scrapeLatestTorrentsForCategory(category, page + 1)
+          : Promise.resolve());
 }

-async function processTorrentRecord(record) {
-  if (await checkAndUpdateTorrent({ provider: NAME, ...record })) {
-    return record;
-  }
-  const torrentEntrys = await leetx
-    .torrent(record.torrentId)
-    .catch(() => undefined);
-  if (torrentEntrys === undefined) {
-    return Promise.resolve([])
-  }
-  return Promise.allSettled(
-    torrentEntrys.map(async (torrentFound) => {
-      if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
-        return Promise.resolve("Invalid torrent record");
-      }
-      if (isNaN(torrentFound.uploadDate)) {
-        console.warn(
-          `Incorrect upload date for [${torrentFound.infoHash}] ${torrentFound.name}`
-        );
-        return;
-      }
-      if (await checkAndUpdateTorrent(torrentFound)) {
-        return torrentFound;
-      }
-      if (!torrentFound.size) {
-        await updateTorrentSize(torrentFound)
-          .catch((err) => Promise.resolve(err))
-      }
-      if (!torrentFound.seeders) {
-        await updateCurrentSeeders(torrentFound)
-          .then(response => response.seeders === 0 ? delete response.seeders : response)
-      }
-      if (!torrentFound.imdbId) {
-        torrentFound.imdbId = await getImdbId(torrentFound.original_name, torrentFound.year, TYPE_MAPPING[torrentFound.category])
-      }
-      const torrent = {
-        infoHash: torrentFound.infoHash,
-        provider: NAME,
-        torrentId: torrentFound.torrentId,
-        title: torrentFound.title.replace(/\t|\s+/g, " ").trim(),
-        type: Type.ANIME,
-        imdbId: torrentFound.imdbId,
-        uploadDate: torrentFound.uploadDate,
-        seeders: torrentFound.seeders,
-      };
-      return createTorrentEntry(torrent);
-    })
-  );
+async function processEntry(entry) {
+  return animetorrrent.torrent(entry.torrentId)
+      .then(records => Promises.sequence(records.map(record => () => processTorrentRecord(record))))
+      .catch(() => undefined);
 }

-function typeMapping() {
-  const mapping = {};
-  mapping[leetx.Categories.MOVIE] = Type.MOVIE;
-  mapping[leetx.Categories.ANIME] = Type.SERIES;
-  mapping[leetx.Categories.OVA] = Type.ANIME
-  return mapping;
+async function processTorrentRecord(foundTorrent) {
+  if (await checkAndUpdateTorrent({ provider: NAME, ...foundTorrent })) {
+    return foundTorrent;
+  }
+
+  if (!foundTorrent.size) {
+    await updateTorrentSize(foundTorrent);
+  }
+  if (!Number.isInteger(foundTorrent.seeders)) {
+    await updateCurrentSeeders(foundTorrent);
+  }
+  if (!foundTorrent.imdbId && !foundTorrent.kitsuId) {
+    const info = { title: foundTorrent.originalName, year: foundTorrent.year };
+    foundTorrent.kitsuId = await getKitsuId(info).catch(() => undefined);
+  }
+
+  const torrent = {
+    infoHash: foundTorrent.infoHash,
+    provider: NAME,
+    torrentId: foundTorrent.torrentId,
+    title: foundTorrent.title,
+    type: Type.ANIME,
+    imdbId: foundTorrent.imdbId,
+    kitsuId: foundTorrent.kitsuId,
+    uploadDate: foundTorrent.uploadDate,
+    seeders: foundTorrent.seeders,
+    size: foundTorrent.size,
+    files: foundTorrent.files,
+    languages: foundTorrent.languages
+  };
+  return createTorrentEntry(torrent);
 }

 function untilPage(category) {
-  if (leetx.Categories.ANIME === category) {
+  if (animetorrrent.Categories.ANIME === category) {
     return 5;
   }
-  if (leetx.Categories.OVA === category) {
+  if (animetorrrent.Categories.OVA === category) {
     return 3;
   }
   return UNTIL_PAGE;
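After this refactor the scrape pipeline is: browse a category page, hand each listing row to processEntry, fetch that entry's torrent page (which can carry several magnets), and run every decoded record through processTorrentRecord sequentially. The following stand-alone driver is only a hypothetical sketch of how those pieces fit together; the require path, the shape of the browse rows (torrentId) and of the decoded records (infoHash, title) are assumptions read off the diff above, and the real scrape() additionally persists the lastScraped timestamp via repository, which this sketch skips.

// Hypothetical driver for the provider API shown in the diff (assumption, not repo code).
const animestorrent = require('./animestorrent_api');

async function scrapeOnePage(category, page) {
  // browse() returns the listing rows of one category page.
  const entries = await animestorrent.browse({ category, page });
  for (const entry of entries) {
    // torrent() resolves every magnet found on the entry's detail page.
    const records = await animestorrent.torrent(entry.torrentId).catch(() => []);
    records.forEach((record) => console.log(record.infoHash, record.title));
  }
}

scrapeOnePage(animestorrent.Categories.ANIME, 1)
    .catch((error) => console.error('scrape failed', error));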