mirror of
https://github.com/knightcrawler-stremio/knightcrawler.git
synced 2024-12-20 03:29:51 +00:00
[scraper] adds nyaa pantsu scraper
This commit is contained in:
@@ -4,7 +4,7 @@ const Sugar = require('sugar-date');
|
||||
const decode = require('magnet-uri');
|
||||
const Promises = require('../../lib/promises');
|
||||
const { escapeHTML } = require('../../lib/metadata');
|
||||
const { getRandomUserAgent, defaultOptionsWithProxy } = require('../../lib/request_helper');
|
||||
const { getRandomUserAgent, defaultOptionsWithProxy } = require('../../lib/requestHelper');
|
||||
|
||||
const defaultProxies = [
|
||||
'https://1337x.to'
|
||||
|
||||
@@ -2,7 +2,7 @@ const cheerio = require('cheerio');
|
||||
const needle = require('needle');
|
||||
const moment = require('moment');
|
||||
const Promises = require('../../lib/promises');
|
||||
const { getRandomUserAgent } = require('./../../lib/request_helper');
|
||||
const { getRandomUserAgent } = require('./../../lib/requestHelper');
|
||||
|
||||
const defaultProxies = [
|
||||
'https://eztv.io'
|
||||
|
||||
63
scraper/scrapers/nyaapantsu/nyaa_pantsu_api.js
Normal file
63
scraper/scrapers/nyaapantsu/nyaa_pantsu_api.js
Normal file
@@ -0,0 +1,63 @@
|
||||
const { pantsu } = require('nyaapi')
|
||||
|
||||
// Nyaa Pantsu category identifiers in the `<category>_<subcategory>` form
// expected by the nyaapi pantsu endpoints (empty subcategory = whole category).
// Frozen: these are shared module-level constants and must not be mutated.
const Categories = Object.freeze({
  ANIME: Object.freeze({
    ALL: '3_',
    ENGLISH: '3_5',
    RAW: '3_6',
    MUSIC_VIDEO: '3_12',
    NON_ENGLISH: '3_13',
  }),
  LIVE_ACTION: Object.freeze({
    ALL: '5_',
    ENGLISH: '5_9',
    RAW: '5_11',
    PROMOTIONAL_VIDEO: '5_10',
    NON_ENGLISH: '5_18',
  }),
});
/**
 * Fetches a single torrent by its Pantsu id and normalises it.
 * @param {string|number} torrentId Pantsu torrent id
 * @returns {Promise<Object>} parsed torrent entry; rejects on a falsy id
 */
async function torrent(torrentId) {
  if (!torrentId) {
    throw new Error(`Failed ${torrentId} search`);
  }
  const result = await pantsu.infoRequest(torrentId);
  return parseTorrent(result);
}
/**
 * Runs a free-text search against the Pantsu API.
 * @param {Object|string} query search parameters forwarded to nyaapi
 * @returns {Promise<Object[]>} parsed torrent entries
 */
async function search(query) {
  const results = await pantsu.search(query);
  return results.map((result) => parseTorrent(result));
}
/**
 * Lists a category page from the Pantsu API.
 * @param {Object} [config] optional settings
 * @param {number} [config.page] 1-based page number (defaults to 1)
 * @param {string} [config.category] category id (defaults to English anime)
 * @returns {Promise<Object[]>} parsed torrent entries
 */
async function browse(config = {}) {
  // `||` (not `??`) kept on purpose: a falsy page/category falls back too.
  const page = config.page || 1;
  const category = config.category || Categories.ANIME.ENGLISH;

  const results = await pantsu.list(category, page);
  return results.map((result) => parseTorrent(result));
}
/**
 * Maps a raw nyaapi pantsu torrent object to the scraper's common record shape.
 * @param {Object} torrent raw API result (expects name, hash; other fields optional)
 * @returns {Object} normalised torrent entry
 */
function parseTorrent(torrent) {
  return {
    // Collapse every whitespace run into one space. The original pattern
    // `/\t|\s+/g` matched single tabs first, so tab runs became multiple spaces.
    title: torrent.name.replace(/\s+/g, ' ').trim(),
    torrentId: torrent.id,
    infoHash: torrent.hash.trim().toLowerCase(),
    magnetLink: torrent.magnet,
    torrentLink: torrent.torrent,
    seeders: torrent.seeders,
    size: torrent.filesize,
    uploadDate: new Date(torrent.date),
    category: `${torrent.category}_${torrent.sub_category}`,
    languages: torrent.languages ? torrent.languages.join(',') : undefined,
    files: torrent.file_list && torrent.file_list.length ? torrent.file_list.map((file, fileId) => ({
      fileIndex: fileId,
      // Basename only. The original `replace(/([^\/]+$)/, '$1')` was a no-op
      // that left `name` identical to the full path.
      name: file.path.split('/').pop(),
      path: file.path,
      size: file.filesize
    })) : undefined
  };
}
// Public API: single-torrent lookup, free-text search, category browsing,
// and the category id constants.
module.exports = { torrent, search, browse, Categories };
|
||||
96
scraper/scrapers/nyaapantsu/nyaa_pantsu_scraper.js
Normal file
96
scraper/scrapers/nyaapantsu/nyaa_pantsu_scraper.js
Normal file
@@ -0,0 +1,96 @@
|
||||
const moment = require('moment');
|
||||
const Bottleneck = require('bottleneck');
|
||||
const pantsu = require('./nyaa_pantsu_api');
|
||||
const { Type } = require('../../lib/types');
|
||||
const Promises = require('../../lib/promises');
|
||||
const repository = require('../../lib/repository');
|
||||
const { updateCurrentSeeders, updateTorrentSize } = require('../../lib/torrent');
|
||||
const { createTorrentEntry, checkAndUpdateTorrent } = require('../../lib/torrentEntries');
|
||||
|
||||
// Provider name recorded in the repository and used in log lines.
const NAME = 'NyaaPantsu';
// Stop paginating the latest-torrents listing after this many pages.
const UNTIL_PAGE = 5; // was missing its semicolon (relied on ASI)

// Caps concurrent API/processing work to avoid hammering the site.
const limiter = new Bottleneck({ maxConcurrent: 5 });
/**
 * Runs a full NyaaPantsu scrape pass and, on success, records the scrape
 * start time on this provider's repository entry.
 */
async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  await scrapeLatestTorrents();
  // Persist the start (not end) time so the next run re-covers this window.
  lastScrape.lastScraped = scrapeStart;
  await lastScrape.save();
  console.log(`[${moment()}] finished ${NAME} scrape`);
}
/**
 * Re-fetches a torrent from the API to refresh its seeder count.
 * @param {Object} torrent record with a `torrentId`
 * @returns {Promise<Object[]>} one-element array when a valid integer seeder
 *     count is available, otherwise an empty array
 */
async function updateSeeders(torrent) {
  const foundTorrent = await limiter.schedule(() => pantsu.torrent(torrent.torrentId));
  return Number.isInteger(foundTorrent.seeders) ? [foundTorrent] : [];
}
/**
 * Scrapes every allowed category sequentially and concatenates the results.
 * Currently only English-translated anime is scraped.
 */
async function scrapeLatestTorrents() {
  const allowedCategories = [pantsu.Categories.ANIME.ENGLISH];

  const perCategory = allowedCategories.map((category) => () => scrapeLatestTorrentsForCategory(category));
  const entries = await Promises.sequence(perCategory);
  return entries.flat();
}
/**
 * Scrapes one listing page for the given category and recurses to the next
 * page while results keep coming, stopping at UNTIL_PAGE.
 * @param {string} category pantsu category id (e.g. '3_5')
 * @param {number} [page=1] 1-based page number
 */
async function scrapeLatestTorrentsForCategory(category, page = 1) {
  console.log(`Scraping ${NAME} ${category} category page ${page}`);
  // Bug fix: `category` was previously dropped (`pantsu.browse(({ page }))`),
  // so every category silently scraped the default listing instead.
  return pantsu.browse({ category, page })
      .catch(error => {
        // A failed page is logged and treated as empty so the pass continues.
        console.warn(`Failed ${NAME} scraping for [${page}] ${category} due: `, error);
        return Promise.resolve([]);
      })
      .then(torrents => Promise.all(torrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent)
          .catch(error => {
            // One bad record must not abort the whole page.
            console.warn(`Failed processing [${torrent.infoHash}] ${torrent.title} due: `, error);
            return Promise.resolve();
          })))))
      .then(resolved => resolved.length > 0 && page < UNTIL_PAGE
          ? scrapeLatestTorrentsForCategory(category, page + 1)
          : Promise.resolve());
}
/**
 * Persists one scraped record as a torrent entry, filling in missing
 * size/seeder data first. Records already known to the repository are
 * only refreshed (via checkAndUpdateTorrent) and returned unchanged.
 * @param {Object} record parsed torrent record (may be falsy)
 * @returns {Promise<Object|undefined>} the stored torrent entry, or the
 *     original record when it was skipped
 */
async function processTorrentRecord(record) {
  // Guard clauses: nothing to do for empty or already-known records.
  if (!record) {
    return record;
  }
  if (await checkAndUpdateTorrent(record)) {
    return record;
  }

  // Backfill metadata the listing did not provide before persisting.
  if (!record.size) {
    await updateTorrentSize(record);
  }
  if (record.seeders === null || record.seeders === undefined) {
    await updateCurrentSeeders(record);
  }

  const torrent = {
    infoHash: record.infoHash,
    provider: NAME,
    torrentId: record.torrentId,
    title: record.title,
    type: Type.ANIME,
    size: record.size,
    seeders: record.seeders,
    uploadDate: record.uploadDate,
    languages: record.languages,
    files: record.files || undefined
  };

  await createTorrentEntry(torrent);
  return torrent;
}
||||
// Scraper contract consumed by the scheduler: full scrape pass, per-torrent
// seeder refresh, and the provider name.
module.exports = { scrape, updateSeeders, NAME };
|
||||
@@ -1,6 +1,6 @@
|
||||
const needle = require('needle');
|
||||
const Promises = require('../../lib/promises');
|
||||
const { getRandomUserAgent } = require('./../../lib/request_helper');
|
||||
const { getRandomUserAgent } = require('./../../lib/requestHelper');
|
||||
|
||||
const defaultProxies = [
|
||||
'https://yts.mx'
|
||||
|
||||
Reference in New Issue
Block a user