Mirror of https://github.com/knightcrawler-stremio/knightcrawler.git (synced 2024-12-20 03:29:51 +00:00)
moves scraper to a subpackage
37 scraper/README.md Normal file
@@ -0,0 +1,37 @@
# Torrentio Scraper

## Initial dumps

### The Pirate Bay

https://mega.nz/#F!tktzySBS!ndSEaK3Z-Uc3zvycQYxhJA

https://thepiratebay.org/static/dump/csv/

### Kickass

https://mega.nz/#F!tktzySBS!ndSEaK3Z-Uc3zvycQYxhJA

https://web.archive.org/web/20150416071329/http://kickass.to/api

### RARBG

Scrape the movie and TV catalogs with [www.webscraper.io](https://www.webscraper.io/) to collect the available `imdbIds`, then use those ids with the API to search for torrents.

Movies sitemap
```json
{"_id":"rarbg-movies","startUrl":["https://rarbgmirror.org/catalog/movies/[1-4110]"],"selectors":[{"id":"rarbg-movie-imdb-id","type":"SelectorHTML","parentSelectors":["_root"],"selector":".lista-rounded table td[width]","multiple":true,"regex":"tt[0-9]+","delay":0}]}
```

TV sitemap
```json
{"_id":"rarbg-tv","startUrl":["https://rarbgmirror.org/catalog/tv/[1-609]"],"selectors":[{"id":"rarbg-tv-imdb-id","type":"SelectorHTML","parentSelectors":["_root"],"selector":".lista-rounded table td[width='110']","multiple":true,"regex":"tt[0-9]+","delay":0}]}
```
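
The sitemaps above only collect `imdbIds`. A minimal sketch of the second step, assuming the public `torrentapi.org` endpoint that RARBG exposed; the `APP_ID`, token lifetime and rate limits here are assumptions to verify against the API docs:

```js
const needle = require('needle');

const API_URL = 'https://torrentapi.org/pubapi_v2.php';
const APP_ID = 'torrentio-scraper'; // hypothetical identifier

async function searchTorrentsByImdbId(imdbId) {
  // tokens are short-lived, so request a fresh one per batch
  const tokenResponse = await needle('get', `${API_URL}?get_token=get_token&app_id=${APP_ID}`);
  const token = tokenResponse.body.token;
  // the API throttles to roughly one request every two seconds
  const response = await needle('get',
      `${API_URL}?mode=search&search_imdb=${imdbId}&token=${token}&app_id=${APP_ID}&format=json_extended`);
  return response.body.torrent_results || [];
}

searchTorrentsByImdbId('tt0111161').then(results => console.log(`found ${results.length} torrents`));
```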

### Migrating Database

When migrating the database to a new one, it is important to restart the `files_id_seq` sequence at the maximum file id plus 1, otherwise newly inserted files will collide with existing ids.

```sql
ALTER SEQUENCE files_id_seq RESTART WITH <last_file_id + 1>;
```
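
The `<last_file_id>` placeholder can be looked up first; assuming PostgreSQL, `setval` does both steps in one statement:

```sql
SELECT MAX(id) FROM files;
-- or, in a single statement (PostgreSQL):
SELECT setval('files_id_seq', (SELECT MAX(id) FROM files) + 1, false);
```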
40 scraper/index.js Normal file
@@ -0,0 +1,40 @@
require('dotenv').config();
const express = require("express");
const server = express();
const schedule = require('node-schedule');
const { connect } = require('./lib/repository');
const thepiratebayScraper = require('./scrapers/thepiratebay/thepiratebay_scraper');
const horribleSubsScraper = require('./scrapers/horriblesubs/horriblesubs_scraper');
const leetxScraper = require('./scrapers/1337x/1337x_scraper');
const kickassScraper = require('./scrapers/kickass/kickass_scraper');
const rarbgScraper = require('./scrapers/rarbg/rarbg_scraper');
const thepiratebayDumpScraper = require('./scrapers/thepiratebay/thepiratebay_dump_scraper');
const thepiratebayUnofficialDumpScraper = require('./scrapers/thepiratebay/thepiratebay_unofficial_dump_scraper');

const PROVIDERS = [
  horribleSubsScraper,
  rarbgScraper,
  thepiratebayScraper,
  kickassScraper,
  leetxScraper
];
const SCRAPE_CRON = process.env.SCRAPE_CRON || '* * 0/4 * * *';
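
// The reduce chain runs providers strictly one after another: each step awaits the
// previous provider before calling the next, and failures are swallowed so one bad
// provider cannot stop the rest of the chain.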
async function scrape() {
  return PROVIDERS
      .reduce(async (previousPromise, nextProvider) => {
        await previousPromise;
        return nextProvider.scrape().catch(() => Promise.resolve());
      }, Promise.resolve());
}

server.get('/', function (req, res) {
  res.sendStatus(200);
});

server.listen(process.env.PORT || 7000, async function () {
  await connect();
  schedule.scheduleJob(SCRAPE_CRON, () => scrape());
  console.log('Scraper started');
  scrape();
});
81 scraper/lib/cache.js Normal file
@@ -0,0 +1,81 @@
const cacheManager = require('cache-manager');
const mongodbStore = require('cache-manager-mongodb');

const GLOBAL_KEY_PREFIX = 'stremio-torrentio';
const IMDB_ID_PREFIX = `${GLOBAL_KEY_PREFIX}|imdb_id`;
const METADATA_PREFIX = `${GLOBAL_KEY_PREFIX}|metadata`;
const TORRENT_FILES_KEY_PREFIX = `stremio-tpb|files`; // legacy prefix, presumably kept so existing cache entries stay readable

const GLOBAL_TTL = process.env.METADATA_TTL || 7 * 24 * 60 * 60; // 7 days
const MEMORY_TTL = process.env.METADATA_TTL || 2 * 60 * 60; // 2 hours

const MONGO_URI = process.env.MONGODB_URI;

const memoryCache = initiateMemoryCache();
const remoteCache = initiateRemoteCache();
const torrentFilesCache = initiateTorrentFilesCache();
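
// When MONGO_URI is not set, initiateTorrentFilesCache() returns undefined, so
// retrieveTorrentFiles() below will fail — cached torrent files require MongoDB.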
function initiateTorrentFilesCache() {
  if (MONGO_URI) {
    return cacheManager.caching({
      store: mongodbStore,
      uri: MONGO_URI,
      options: {
        collection: 'cacheManager',
      },
      ttl: GLOBAL_TTL,
      ignoreCacheErrors: true
    });
  }
}

function initiateRemoteCache() {
  if (MONGO_URI) {
    return cacheManager.caching({
      store: mongodbStore,
      uri: MONGO_URI,
      options: {
        collection: 'torrentio_scraper_collection',
      },
      ttl: GLOBAL_TTL,
      ignoreCacheErrors: true
    });
  } else {
    return cacheManager.caching({
      store: 'memory',
      ttl: GLOBAL_TTL
    });
  }
}

function initiateMemoryCache() {
  return cacheManager.caching({
    store: 'memory',
    ttl: MEMORY_TTL
  });
}

function retrieveTorrentFiles(infoHash) {
  return torrentFilesCache.get(`${TORRENT_FILES_KEY_PREFIX}:${infoHash}`)
      .then((results) => {
        if (!results) {
          throw new Error('No cached files found');
        }
        return results;
      });
}

function cacheWrap(cache, key, method, options) {
  return cache.wrap(key, method, options);
}

function cacheWrapImdbId(key, method) {
  return cacheWrap(remoteCache, `${IMDB_ID_PREFIX}:${key}`, method, { ttl: GLOBAL_TTL });
}

function cacheWrapMetadata(id, method) {
  return cacheWrap(memoryCache, `${METADATA_PREFIX}:${id}`, method, { ttl: GLOBAL_TTL });
}

module.exports = { cacheWrapImdbId, cacheWrapMetadata, retrieveTorrentFiles };
119 scraper/lib/metadata.js Normal file
@@ -0,0 +1,119 @@
const needle = require('needle');
const nameToImdb = require('name-to-imdb');
const bing = require('nodejs-bing');
const { cacheWrapImdbId, cacheWrapMetadata } = require('./cache');
const { Type } = require('./types');

const CINEMETA_URL = 'https://v3-cinemeta.strem.io';
const KITSU_URL = 'https://anime-kitsu.now.sh';
const TIMEOUT = 20000;

function getMetadata(id, type = Type.SERIES) {
  if (!id) {
    return Promise.reject("no valid id provided");
  }

  const key = id.match(/^\d+$/) ? `kitsu:${id}` : id;
  const metaType = type === Type.MOVIE ? Type.MOVIE : Type.SERIES;
  return cacheWrapMetadata(key,
      () => _requestMetadata(`${KITSU_URL}/meta/${metaType}/${key}.json`)
          .catch(() => _requestMetadata(`${CINEMETA_URL}/meta/${metaType}/${key}.json`))
          .catch((error) => {
            throw new Error(`failed metadata query ${key} due: ${error.message}`);
          }));
}
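
// e.g. getMetadata('tt0098904', Type.SERIES) resolves to an object with imdbId, title,
// year, videos and per-season episode counts, sourced from Kitsu with Cinemeta as fallback.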

function _requestMetadata(url) {
  return needle('get', url, { open_timeout: TIMEOUT })
      .then((response) => {
        const body = response.body;
        if (body && body.meta && body.meta.id) {
          return {
            kitsuId: body.meta.kitsu_id,
            imdbId: body.meta.imdb_id,
            title: body.meta.name,
            year: body.meta.year,
            country: body.meta.country,
            genres: body.meta.genres,
            videos: (body.meta.videos || [])
                .map((video) => video.imdbSeason
                    ? {
                      season: video.season,
                      episode: video.episode,
                      imdbSeason: video.imdbSeason,
                      imdbEpisode: video.imdbEpisode
                    }
                    : {
                      season: video.season,
                      episode: video.episode,
                      kitsuId: video.kitsu_id,
                      kitsuEpisode: video.kitsuEpisode,
                      released: video.released
                    }
                ),
            episodeCount: Object.values((body.meta.videos || [])
                .filter((entry) => entry.season !== 0)
                .sort((a, b) => a.season - b.season)
                .reduce((map, next) => {
                  map[next.season] = map[next.season] + 1 || 1;
                  return map;
                }, {})),
            totalCount: body.meta.videos && body.meta.videos
                .filter((entry) => entry.season !== 0).length
          };
        } else {
          throw new Error('No search results');
        }
      });
}

function escapeTitle(title) {
  return title.toLowerCase()
      .normalize('NFKD') // normalize non-ASCII characters
      .replace(/[\u0300-\u036F]/g, '') // strip combining diacritical marks
      .replace(/&/g, 'and')
      .replace(/[;, ~\-]+/g, ' ') // replace semicolons, commas, tildes and dashes with spaces
      .replace(/[^\w ()+#@!']+/g, '') // remove the remaining non-alphanumeric chars
      .trim();
}

async function getImdbId(info, type) {
  const name = escapeTitle(info.title);
  const year = info.year || info.date && info.date.slice(0, 4);
  const key = `${name}_${year}_${type}`;

  return cacheWrapImdbId(key,
      () => new Promise((resolve, reject) => {
        nameToImdb({ name, year, type }, function (err, res) {
          if (res) {
            resolve(res);
          } else {
            reject(err || new Error('failed imdbId search'));
          }
        });
      }).catch(() => bing.web(`${name} ${year || ''} ${type} imdb`)
          .then(results => results
              .map((result) => result.link)
              .find(result => result.includes('imdb.com/title/')))
          .then(result => result && result.match(/imdb\.com\/title\/(tt\d+)/))
          .then(match => match && match[1])));
}

async function getKitsuId(info) {
  const title = escapeTitle(info.title);
  const season = info.season > 1 ? ` S${info.season}` : '';
  const query = `${title}${season}`;

  return cacheWrapImdbId(query,
      () => needle('get', `${KITSU_URL}/catalog/series/kitsu-anime-list/search=${query}.json`, { open_timeout: 60000 })
          .then((response) => {
            const body = response.body;
            if (body && body.metas && body.metas.length) {
              return body.metas[0].id.replace('kitsu:', '');
            } else {
              throw new Error('No search results');
            }
          }));
}

module.exports = { getMetadata, getImdbId, getKitsuId };
151 scraper/lib/repository.js Normal file
@@ -0,0 +1,151 @@
const { Sequelize } = require('sequelize');
const Op = Sequelize.Op;

const DATABASE_URI = process.env.DATABASE_URI;

const database = new Sequelize(DATABASE_URI, { logging: false });

const Provider = database.define('provider', {
  name: { type: Sequelize.STRING(32), primaryKey: true },
  lastScraped: { type: Sequelize.DATE },
  lastScrapedId: { type: Sequelize.STRING(128) }
});

const Torrent = database.define('torrent', {
  infoHash: { type: Sequelize.STRING(64), primaryKey: true },
  provider: { type: Sequelize.STRING(32), allowNull: false },
  torrentId: { type: Sequelize.STRING(128) },
  title: { type: Sequelize.STRING(256), allowNull: false },
  size: { type: Sequelize.BIGINT },
  type: { type: Sequelize.STRING(16), allowNull: false },
  uploadDate: { type: Sequelize.DATE, allowNull: false },
  seeders: { type: Sequelize.SMALLINT },
  trackers: { type: Sequelize.STRING(4096) }
});

const File = database.define('file',
    {
      id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true },
      infoHash: {
        type: Sequelize.STRING(64),
        allowNull: false,
        references: { model: Torrent, key: 'infoHash' },
        onDelete: 'CASCADE'
      },
      fileIndex: { type: Sequelize.INTEGER },
      title: { type: Sequelize.STRING(256), allowNull: false },
      size: { type: Sequelize.BIGINT },
      imdbId: { type: Sequelize.STRING(32) },
      imdbSeason: { type: Sequelize.INTEGER },
      imdbEpisode: { type: Sequelize.INTEGER },
      kitsuId: { type: Sequelize.INTEGER },
      kitsuEpisode: { type: Sequelize.INTEGER }
    },
    {
      indexes: [
        { unique: true, fields: ['infoHash'], where: { fileIndex: { [Op.eq]: null } } },
        { unique: true, fields: ['infoHash', 'fileIndex', 'imdbSeason', 'imdbEpisode'] },
        { unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] },
        { unique: false, fields: ['kitsuId', 'kitsuEpisode'] }
      ]
    }
);

const SkipTorrent = database.define('skip_torrent', {
  infoHash: { type: Sequelize.STRING(64), primaryKey: true },
});

const FailedImdbTorrent = database.define('failed_imdb_torrent', {
  infoHash: { type: Sequelize.STRING(64), primaryKey: true },
  title: { type: Sequelize.STRING(256), allowNull: false }
});
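
// sync({ alter: true }) mutates the live schema to match the models on every startup;
// assumed to be acceptable here because the scraper owns this database exclusively.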
function connect() {
  return database.sync({ alter: true });
}

function getProvider(provider) {
  return Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider });
}

function updateProvider(provider) {
  return Provider.update(provider, { where: { name: { [Op.eq]: provider.name } } });
}

function getTorrent(torrent) {
  return Torrent.findByPk(torrent.infoHash)
      .then((result) => {
        if (!result) {
          throw new Error(`torrent not found: ${torrent.infoHash}`);
        }
        return result.dataValues;
      });
}

function getTorrentsBasedOnTitle(titleQuery, type) {
  return Torrent.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` }, type: type } });
}

function createTorrent(torrent) {
  return Torrent.upsert(torrent);
}

function createFile(file) {
  return File.upsert(file);
}

function getFiles(torrent) {
  return File.findAll({ where: { infoHash: torrent.infoHash } });
}

function getFilesBasedOnTitle(titleQuery) {
  return File.findAll({ where: { title: { [Op.iLike]: `%${titleQuery}%` } } });
}

function deleteFile(file) {
  return File.destroy({ where: { id: file.id } });
}

function getSkipTorrent(torrent) {
  return SkipTorrent.findByPk(torrent.infoHash)
      .then((result) => {
        if (!result) {
          return getFailedImdbTorrent(torrent);
        }
        return result.dataValues;
      });
}

function createSkipTorrent(torrent) {
  return SkipTorrent.upsert({ infoHash: torrent.infoHash });
}

function getFailedImdbTorrent(torrent) {
  return FailedImdbTorrent.findByPk(torrent.infoHash)
      .then((result) => {
        if (!result) {
          throw new Error(`torrent not found: ${torrent.infoHash}`);
        }
        return result.dataValues;
      });
}

function createFailedImdbTorrent(torrent) {
  return FailedImdbTorrent.upsert(torrent);
}

module.exports = {
  connect,
  getProvider,
  updateProvider,
  createTorrent,
  getTorrent,
  getTorrentsBasedOnTitle,
  createFile,
  getFiles,
  getFilesBasedOnTitle,
  deleteFile,
  getSkipTorrent,
  createSkipTorrent,
  createFailedImdbTorrent
};
144 scraper/lib/torrent.js Normal file
@@ -0,0 +1,144 @@
const torrentStream = require('torrent-stream');
const needle = require('needle');
const parseTorrent = require('parse-torrent');
const async = require('async');
const decode = require('magnet-uri');
const { retrieveTorrentFiles } = require('./cache');

const MAX_PEER_CONNECTIONS = process.env.MAX_PEER_CONNECTIONS || 20;
const SEEDS_CHECK_TIMEOUT = process.env.SEEDS_CHECK_TIMEOUT || 10 * 1000; // 10 secs
const EXTENSIONS = ["3g2", "3gp", "avi", "flv", "mkv", "mov", "mp2", "mp4", "mpe", "mpeg", "mpg", "mpv", "webm", "wmv"];

module.exports.updateCurrentSeeders = function (torrent) {
  return new Promise((resolve) => {
    if (!torrent.magnetLink) {
      return resolve(0);
    }
    const Tracker = require("peer-search/tracker");

    const seeders = {};
    const decodedMagnetLink = decode(torrent.magnetLink);
    const trackers = decodedMagnetLink && decodedMagnetLink.tr;
    const callback = () => resolve(Math.max(...Object.values(seeders).map(values => values[0]).concat(0)));
    setTimeout(callback, SEEDS_CHECK_TIMEOUT);

    async.each(trackers, function (tracker, ready) {
      const t = new Tracker(tracker, {}, torrent.infoHash);
      console.error = () => 0; // silence tracker errors (side effect: disables console.error process-wide)
      t.run();
      t.on("info", function (inf) {
        seeders[tracker] = [inf.seeders, inf.leechers];
        ready();
      });
    }, callback);
  }).then((seeders) => ({ ...torrent, seeders: torrent.seeders || seeders }));
};

module.exports.torrentFiles = function (torrent) {
  return getFilesFromObject(torrent)
      .catch(() => filesFromTorrentFile(torrent))
      .catch(() => filesFromTorrentStream(torrent))
      .catch(() => filesFromCache(torrent.infoHash))
      .then((files) => filterVideos(files))
      .then((files) => filterSamples(files))
      .then((files) => filterExtras(files));
};

function getFilesFromObject(torrent) {
  if (torrent.files && torrent.files.length) {
    return Promise.resolve(torrent.files);
  }
  return Promise.reject("No files in the object");
}

function filesFromCache(infoHash) {
  return retrieveTorrentFiles(infoHash)
      .then((files) => files.map((file) => ({
        fileIndex: parseInt(file.match(/^(\d+)@@/)[1]),
        name: file.replace(/.+\/|^\d+@@/, ''),
        path: file.replace(/^\d+@@/, ''),
        size: 300000000
      })));
}

async function filesFromTorrentFile(torrent) {
  if (!torrent.torrentLink) {
    return Promise.reject(new Error("no torrentLink"));
  }

  return needle('get', torrent.torrentLink, { open_timeout: 10000 })
      .then((response) => {
        if (!response.body || response.statusCode !== 200) {
          throw new Error('torrent not found');
        }
        return response.body;
      })
      .then((body) => parseTorrent(body))
      .then((info) => info.files.map((file, fileId) => ({
        fileIndex: fileId,
        name: file.name,
        path: file.path.replace(/^[^\/]+\//, ''),
        size: file.length
      })));
}

async function filesFromTorrentStream(torrent) {
  if (!torrent.infoHash && !torrent.magnetLink) {
    return Promise.reject(new Error("no infoHash or magnetLink"));
  }
  return new Promise((resolve, reject) => {
    const engine = new torrentStream(torrent.magnetLink || torrent.infoHash, { connections: MAX_PEER_CONNECTIONS });

    engine.ready(() => {
      const files = engine.files
          .map((file, fileId) => ({
            fileIndex: fileId,
            name: file.name,
            path: file.path.replace(/^[^\/]+\//, ''),
            size: file.length
          }));

      engine.destroy();
      resolve(files);
    });
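
    // note: this timeout races engine.ready(); if it fires after a successful
    // resolve, the late reject() is ignored since a promise can only settle once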
    setTimeout(() => {
      engine.destroy();
      reject(new Error('No available connections for torrent!'));
    }, 60000);
  });
}

function filterVideos(files) {
  return files.filter((file) => {
    const match = file.path.match(/\.(\w{2,4})$/);
    return match && EXTENSIONS.includes(match[1]);
  });
}

function filterSamples(files) {
  const maxSize = Math.max(...files.map(file => file.size));
  const isSample = file => file.name.match(/sample/i) && maxSize / file.size > 10; // sample-named files under a tenth of the largest file
  return files.filter(file => !isSample(file));
}

function filterExtras(files) {
  const isExtra = file => file.path.match(/extras?\//i);
  return files.filter(file => !isExtra(file));
}
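
// presumably a download timeout heuristic scaled by seeder count; it is neither
// exported nor called within this file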
function dynamicTimeout(torrent) {
  if (torrent.seeders < 5) {
    return 5000;
  } else if (torrent.seeders < 10) {
    return 7000;
  } else if (torrent.seeders < 20) {
    return 10000;
  } else if (torrent.seeders < 30) {
    return 15000;
  } else if (torrent.seeders < 50) {
    return 20000;
  } else {
    return 30000;
  }
}
66 scraper/lib/torrentEntries.js Normal file
@@ -0,0 +1,66 @@
const { parse } = require('parse-torrent-title');
const { Type } = require('./types');
const repository = require('./repository');
const { getImdbId, getKitsuId } = require('./metadata');
const { parseTorrentFiles } = require('./torrentFiles');

async function createTorrentEntry(torrent) {
  const titleInfo = parse(torrent.title);

  if (titleInfo.seasons && torrent.type === Type.MOVIE) {
    // sometimes a series torrent is put into the movies category
    torrent.type = Type.SERIES;
  }
  if (!torrent.imdbId && torrent.type !== Type.ANIME) {
    torrent.imdbId = await getImdbId(titleInfo, torrent.type)
        .catch(() => undefined);
  }
  if (torrent.imdbId && torrent.imdbId.length > 9 && torrent.imdbId.startsWith('tt0')) {
    // sanitize imdbId from redundant zeros, e.g. 'tt00123456' -> 'tt0123456'
    torrent.imdbId = torrent.imdbId.replace(/tt0+([0-9]{7,})$/, 'tt$1');
  }
  if (!torrent.kitsuId && torrent.type === Type.ANIME) {
    torrent.kitsuId = await getKitsuId(titleInfo)
        .catch(() => undefined);
  }

  if (!torrent.imdbId && !torrent.kitsuId && !titleInfo.complete) {
    console.log(`imdbId or kitsuId not found: ${torrent.title}`);
    repository.createFailedImdbTorrent(torrent);
    return;
  }

  const files = await parseTorrentFiles(torrent);
  if (!files || !files.length) {
    console.log(`no video files found: ${torrent.title}`);
    return;
  }

  return repository.createTorrent(torrent)
      .then(() => Promise.all(files.map(file => repository.createFile(file))))
      .then(() => console.log(`Created entry for ${torrent.title}`));
}

async function createSkipTorrentEntry(torrent) {
  return repository.createSkipTorrent(torrent);
}

async function getStoredTorrentEntry(torrent) {
  return repository.getSkipTorrent(torrent)
      .catch(() => repository.getTorrent(torrent))
      .catch(() => undefined);
}

async function updateTorrentSeeders(torrent) {
  if (torrent.seeders === undefined) {
    return;
  }

  return repository.getTorrent(torrent)
      .then(stored => {
        stored.seeders = torrent.seeders;
        return stored.save();
      }).catch(() => undefined);
}

module.exports = { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry, updateTorrentSeeders };
297 scraper/lib/torrentFiles.js Normal file
@@ -0,0 +1,297 @@
const moment = require('moment');
const { parse } = require('parse-torrent-title');
const { torrentFiles } = require('../lib/torrent');
const { escapeTitle, getMetadata, getImdbId } = require('../lib/metadata');
const { Type } = require('./types');

const MIN_SIZE = 10 * 1024 * 1024; // 10 MB

async function parseTorrentFiles(torrent) {
  const parsedTorrentName = parse(torrent.title);
  parsedTorrentName.hasMovies = parsedTorrentName.complete || !!torrent.title.match(/movies?(?:\W|$)/);
  const metadata = await getMetadata(torrent.kitsuId || torrent.imdbId, torrent.type || Type.MOVIE)
      .catch(() => undefined);

  // if (metadata && metadata.type !== torrent.type && torrent.type !== Type.ANIME) {
  //   throw new Error(`Mismatching entry type for ${torrent.name}: ${torrent.type}!=${metadata.type}`);
  // }

  if (torrent.type === Type.MOVIE && !parsedTorrentName.seasons) {
    if (parsedTorrentName.complete) {
      return torrentFiles(torrent)
          .then(files => files.filter(file => file.size > MIN_SIZE))
          .then(files => Promise.all(files
              .map((file) => findMovieImdbId(file.name)
                  .then((newImdbId) => ({
                    infoHash: torrent.infoHash,
                    fileIndex: file.fileIndex,
                    title: file.name,
                    size: file.size,
                    imdbId: newImdbId,
                  })))))
          .catch(error => {
            console.log(`Failed getting files for ${torrent.title}`, error.message);
            return [];
          });
    }

    return [{
      infoHash: torrent.infoHash,
      title: torrent.title,
      size: torrent.size,
      imdbId: torrent.imdbId || metadata && metadata.imdb_id,
      kitsuId: torrent.kitsuId || metadata && metadata.kitsu_id
    }];
  }

  return getSeriesFiles(torrent, parsedTorrentName)
      .then((files) => files
          .filter((file) => file.size > MIN_SIZE)
          .map((file) => parseSeriesFile(file, parsedTorrentName)))
      .then((files) => decomposeEpisodes(torrent, files, metadata))
      .then((files) => assignKitsuOrImdbEpisodes(files, metadata))
      .then((files) => Promise.all(files.map(file => file.isMovie
          ? mapSeriesMovie(file, torrent)
          : mapSeriesEpisode(file, torrent, files))))
      .then((files) => files.reduce((a, b) => a.concat(b), []))
      .catch((error) => {
        console.log(`Failed getting files for ${torrent.title}`, error.message);
        return [];
      });
}

async function getSeriesFiles(torrent, parsedTorrentName) {
  if (parsedTorrentName.episode || (!parsedTorrentName.episodes && parsedTorrentName.date)) {
    return [{
      name: torrent.title,
      path: torrent.title,
      size: torrent.size
    }];
  }

  return torrentFiles(torrent);
}

async function mapSeriesEpisode(file, torrent, files) {
  if (!file.episodes && !file.kitsuEpisodes) {
    if (files.some(otherFile => otherFile.episodes || otherFile.kitsuEpisodes) || parse(torrent.title).seasons) {
      return Promise.resolve({
        infoHash: torrent.infoHash,
        fileIndex: file.fileIndex,
        title: file.path || file.name,
        size: file.size,
        imdbId: torrent.imdbId || file.imdbId,
      });
    }
    return Promise.resolve([]);
  }
  const episodeIndexes = [...(file.episodes || file.kitsuEpisodes).keys()];
  return Promise.resolve(episodeIndexes.map((index) => ({
    infoHash: torrent.infoHash,
    fileIndex: file.fileIndex,
    title: file.path || file.name,
    size: file.size,
    imdbId: torrent.imdbId || file.imdbId,
    imdbSeason: file.season,
    imdbEpisode: file.episodes && file.episodes[index],
    kitsuId: torrent.kitsuId || file.kitsuId,
    kitsuEpisode: file.kitsuEpisodes && file.kitsuEpisodes[index]
  })));
}

async function mapSeriesMovie(file, torrent) {
  return findMovieImdbId(file).then((imdbId) => [{
    infoHash: torrent.infoHash,
    fileIndex: file.fileIndex,
    title: file.name,
    size: file.size,
    imdbId: imdbId
  }]);
}

function parseSeriesFile(file, parsedTorrentName) {
  const fileInfo = parse(file.name);
  // the episode may be in a folder containing the season number
  if (!fileInfo.season && parsedTorrentName.season) {
    fileInfo.season = parsedTorrentName.season;
  } else if (!fileInfo.season && file.path.includes('/')) {
    const folders = file.path.split('/');
    const pathInfo = parse(folders[folders.length - 2]);
    fileInfo.season = pathInfo.season;
  }
  // force episode to any found number if it was not parsed
  if (!fileInfo.episodes) {
    const epMatcher = fileInfo.title.match(/(?<!movie\W*|film\W*)(?:^|\W)(\d{1,4})(?:a|b|v\d)?(?:\W|$)(?!movie|film)/i);
    fileInfo.episodes = epMatcher && [parseInt(epMatcher[1], 10)];
    fileInfo.episode = fileInfo.episodes && fileInfo.episodes[0];
  }
  fileInfo.isMovie = (parsedTorrentName.hasMovies && !fileInfo.season && (!fileInfo.episodes || !!fileInfo.year))
      || (!fileInfo.season && !!file.name.match(/\b(?:\d+[ .]movie|movie[ .]\d+)\b/i));

  return { ...file, ...fileInfo };
}

async function decomposeEpisodes(torrent, files, metadata = { episodeCount: [] }) {
  if (files.every(file => !file.episodes && !file.date)) {
    return files;
  }
  // for the anime type, episodes are always absolute and within a single season
  if (torrent.type === Type.ANIME) {
    files
        .filter(file => file.episodes)
        .forEach(file => file.season = 1);
    return files;
  }

  const sortedEpisodes = files
      .map(file => !file.isMovie && file.episodes || [])
      .reduce((a, b) => a.concat(b), [])
      .sort((a, b) => a - b);

  if (sortedEpisodes.every(ep => ep > 100)
      && sortedEpisodes.slice(1).some((ep, index) => ep - sortedEpisodes[index] > 10)
      && sortedEpisodes.every(ep => metadata.episodeCount[div100(ep) - 1] >= mod100(ep))
      && files.every(file => !file.season || file.episodes.every(ep => div100(ep) === file.season))) {
    decomposeConcatSeasonAndEpisodeFiles(torrent, files, metadata);
  } else if (files.every(file => (!file.season || !metadata.episodeCount[file.season - 1]) && file.date)) {
    decomposeDateEpisodeFiles(torrent, files, metadata);
  } else if (files.filter(file => !file.isMovie && file.episodes).every(file => !file.season && file.episodes) ||
      files.some(file => file.season && file.episodes && file.episodes
          .every(ep => metadata.episodeCount[file.season - 1] < ep))) {
    decomposeAbsoluteEpisodeFiles(torrent, files, metadata);
  }

  return files;
}

function decomposeConcatSeasonAndEpisodeFiles(torrent, files, metadata) {
  // decompose concatenated season+episode numbers (ex. 101 = S01E01) in case:
  // 1. the file has a season, but individual files are concatenated with that season
  //    (ex. path Season 5/511 - Prize Fighters.avi)
  // 2. the file does not have a season and the episode does not go out of range for
  //    the concat season's episode count
  files
      .filter(file => file.episodes && file.episodes.every(ep => ep > 100))
      .filter(file => metadata.episodeCount[(file.season || div100(file.episodes[0])) - 1] < 100)
      .filter(file => file.season && file.episodes.every(ep => div100(ep) === file.season) || !file.season)
      .forEach(file => {
        file.season = div100(file.episodes[0]);
        file.episodes = file.episodes.map(ep => mod100(ep));
      });
}
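
// Maps absolute episode numbers onto seasons by walking the per-season episode counts;
// e.g. with episodeCount [20, 20], absolute episode 37 becomes S02E17. Without any
// episode counts everything defaults to season 1.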
function decomposeAbsoluteEpisodeFiles(torrent, files, metadata) {
  if (metadata.episodeCount.length === 0) {
    files
        .filter(file => !file.season && file.episodes && !file.isMovie)
        .forEach(file => {
          file.season = 1;
        });
    return;
  }
  files
      .filter(file => file.episodes && !file.isMovie)
      .forEach(file => {
        const seasonIdx = ([...metadata.episodeCount.keys()]
            .find((i) => metadata.episodeCount.slice(0, i + 1).reduce((a, b) => a + b) >= file.episodes[0])
            + 1 || metadata.episodeCount.length) - 1;

        file.season = seasonIdx + 1;
        file.episodes = file.episodes
            .map(ep => ep - metadata.episodeCount.slice(0, seasonIdx).reduce((a, b) => a + b, 0));
      });
}

function decomposeDateEpisodeFiles(torrent, files, metadata) {
  if (!metadata || !metadata.videos || !metadata.videos.length) {
    return;
  }

  const timeZoneOffset = getTimeZoneOffset(metadata.country);
  const offsetVideos = metadata.videos
      .reduce((map, video) => {
        const releaseDate = moment(video.released).utcOffset(timeZoneOffset).format('YYYY-MM-DD');
        map[releaseDate] = video;
        return map;
      }, {});

  files
      .filter(file => file.date)
      .forEach(file => {
        const video = offsetVideos[file.date];
        if (video) {
          file.season = video.season;
          file.episodes = [video.episode];
        }
      });
}

function getTimeZoneOffset(country) {
  switch (country) {
    case 'USA':
      return '-08:00';
    default:
      return '00:00';
  }
}

function assignKitsuOrImdbEpisodes(files, metadata) {
  if (!metadata || !metadata.videos || !metadata.videos.length) {
    return files;
  }

  const seriesMapping = metadata.videos
      .reduce((map, video) => {
        const episodeMap = map[video.season] || {};
        episodeMap[video.episode] = video;
        map[video.season] = episodeMap;
        return map;
      }, {});

  if (metadata.videos.some(video => video.imdbSeason) || !metadata.imdbId) {
    // kitsu episode info is the base
    files
        .filter(file => file.season && file.episodes)
        .forEach(file => {
          const seasonMapping = seriesMapping[file.season];
          file.kitsuEpisodes = file.episodes;
          if (seasonMapping && seasonMapping[file.episodes[0]] && seasonMapping[file.episodes[0]].imdbSeason) {
            file.imdbId = metadata.imdbId;
            file.season = seasonMapping[file.episodes[0]].imdbSeason;
            file.episodes = file.episodes.map(ep => seasonMapping[ep] && seasonMapping[ep].imdbEpisode);
          } else {
            // no imdb mapping available for the episode
            file.season = undefined;
            file.episodes = undefined;
          }
        });
  } else if (metadata.videos.some(video => video.kitsuEpisode)) {
    // imdb episode info is the base
    files
        .filter(file => file.season && file.episodes)
        .forEach(file => {
          const seasonMapping = seriesMapping[file.season];
          if (seasonMapping && seasonMapping[file.episodes[0]] && seasonMapping[file.episodes[0]].kitsuId) {
            file.kitsuId = seasonMapping[file.episodes[0]].kitsuId;
            file.kitsuEpisodes = file.episodes.map(ep => seasonMapping[ep] && seasonMapping[ep].kitsuEpisode);
          }
        });
  }
  return files;
}

function findMovieImdbId(title) {
  const parsedTitle = typeof title === 'string' ? parse(title) : title;
  return getImdbId(parsedTitle, Type.MOVIE).catch(() => undefined);
}

function div100(episode) {
  return (episode / 100 >> 0); // floor to the nearest int
}

function mod100(episode) {
  return episode % 100;
}

module.exports = { parseTorrentFiles };
5 scraper/lib/types.js Normal file
@@ -0,0 +1,5 @@
exports.Type = {
  MOVIE: 'movie',
  SERIES: 'series',
  ANIME: 'anime'
};
172 scraper/manual/manual.js Normal file
@@ -0,0 +1,172 @@
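// One-off maintenance scripts; uncomment the desired invocation at the bottom
// of the file and run it manually.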
require('dotenv').config();
const Bottleneck = require('bottleneck');
const { parse } = require('parse-torrent-title');
const repository = require('../lib/repository');
const { parseTorrentFiles } = require('../lib/torrentFiles');
const { Type } = require('../lib/types');

const limiter = new Bottleneck({ maxConcurrent: 40 });

async function addMissingEpisodes() {
  const torrent = { infoHash: '0ec780c2c7f8d5b38e61827f0b53c77c3d22f955' };
  const torrentFiles = await require('../lib/torrent').torrentFiles(torrent);
  const storedFiles = await repository.getFiles(torrent)
      .then((files) => files.reduce((map, next) => (map[next.fileIndex] = next, map), {}));
  const imdbId = Object.values(storedFiles)[0].imdbId;

  torrentFiles
      .filter((file) => !storedFiles[file.fileIndex])
      .map((file) => ({
        infoHash: torrent.infoHash,
        fileIndex: file.fileIndex,
        title: file.name,
        size: file.size,
        imdbId: imdbId,
        imdbSeason: parse(file.name).season,
        imdbEpisode: parse(file.name).episode,
        // imdbSeason: parseInt(file.name.match(/(\d+)[ .]?-[ .]?\d+/)[1], 10),
        // imdbEpisode: parseInt(file.name.match(/\d+[ .]?-[ .]?(\d+)/)[1], 10),
      }))
      .forEach((file) => repository.createFile(file));
}

async function updateMovieCollections() {
  const collectionFiles = await repository.getFilesBasedOnTitle('logy')
      .then(files => files.filter(file => file.fileIndex === null))
      .then(files => files.filter(file => parse(file.title).complete));

  collectionFiles.map(original => repository.getTorrent({ infoHash: original.infoHash })
      .then(torrent => parseTorrentFiles({ ...torrent, imdbId: original.imdbId }))
      .then(files => Promise.all(files.map(file => {
        console.log(file);
        return repository.createFile(file);
      })))
      .then(createdFiles => {
        if (createdFiles && createdFiles.length) {
          console.log(`Updated movie collection ${original.title}`);
          repository.deleteFile(original);
        } else {
          console.log(`Failed updating movie collection ${original.title}`);
        }
      }));
}

async function reapplySeriesSeasonsSavedAsMovies() {
  return repository.getTorrentsBasedOnTitle('(?:[^a-zA-Z0-9]|^)[Ss][012]?[0-9](?:[^0-9]|$)', Type.MOVIE)
      .then(torrents => Promise.all(torrents
          .filter(torrent => parse(torrent.title).seasons)
          .map(torrent => limiter.schedule(() => reapplyEpisodeDecomposing(torrent.infoHash, false)
              .then(() => {
                torrent.type = Type.SERIES;
                return torrent.save();
              })))))
      .then(() => console.log('Finished updating multiple torrents'));
}

async function reapplyEpisodeDecomposing(infoHash, includeSourceFiles = true) {
  const torrent = await repository.getTorrent({ infoHash });
  const storedFiles = await repository.getFiles({ infoHash });
  const fileIndexMap = storedFiles
      .reduce((map, next) => (map[next.fileIndex] = (map[next.fileIndex] || []).concat(next), map), {});
  const files = includeSourceFiles && Object.values(fileIndexMap)
      .map(sameIndexFiles => sameIndexFiles[0])
      .map(file => ({ fileIndex: file.fileIndex, name: file.title, path: file.title, size: file.size }));
  const imdbId = storedFiles[0].imdbId;

  return parseTorrentFiles({ ...torrent, imdbId, files })
      .then(newFiles => newFiles.map(file => {
        if (fileIndexMap[file.fileIndex]) {
          const originalFile = fileIndexMap[file.fileIndex].shift();
          if (originalFile) {
            originalFile.imdbSeason = file.imdbSeason;
            originalFile.imdbEpisode = file.imdbEpisode;
            originalFile.kitsuId = file.kitsuId;
            originalFile.kitsuEpisode = file.kitsuEpisode;
            return originalFile;
          }
        }
        return file;
      }))
      .then(updatedFiles => Promise.all(updatedFiles
          .map(file => file.id ? file.save() : repository.createFile(file))))
      .then(() => console.log(`Updated files for ${torrent.title}`));
}

async function findAllFiles() {
  /* Test cases */
  /* Anime Season and absolute episodes */
  const torrent = {
    infoHash: '6b95e5cfde9aaa71970a14f6bb6b9de19e2cbfa1',
    title: '[OMDA] Bleach + Filmes + Ovas (480p-720p x264 AAC-MP3) [rich_jc]',
    type: Type.SERIES,
    imdbId: 'tt0434665'
  };
  /* Season and concat episodes */
  // const torrent = {
  //   infoHash: '235e8ed73b6cc9679b0842c39e17223c47b51f68',
  //   title: 'Daria - The Complete Animated Series [2010] DVDRip',
  //   type: Type.SERIES,
  //   imdbId: 'tt0118298'
  // };
  /* Series Season and absolute episodes */
  // const torrent = {
  //   infoHash: '16b4560beb05397c0eeb35487a997caf789243ea',
  //   title: 'Seinfeld - Complete Collection',
  //   type: Type.SERIES,
  //   imdbId: 'tt0098904'
  // };
  /* Series Season and episodes */
  // const torrent = {
  //   infoHash: 'd0f120c1bbfb988eb35b648e1c78ca3e5d45ef39',
  //   title: 'Seinfeld Complete Series-720p WEBrip EN-SUB x264-[MULVAcoded]',
  //   type: Type.SERIES,
  //   imdbId: 'tt0098904'
  // };
  /* Anime single absolute episode */
  // const torrent = {
  //   infoHash: 'e81e12880980086c476aa8bfdd22bed9d41b1dfe',
  //   title: '[Vision] Naruto Shippuuden - 451 (1080p x264 AAC) [rich_jc].mp4',
  //   size: 467361138,
  //   type: Type.SERIES,
  //   imdbId: 'tt0988824'
  // };
  /* Date based episode */
  // const torrent = {
  //   infoHash: '5a8e9e64fa04e3541236f049cb6b0d35e4ca12cc',
  //   title: 'Jimmy.Fallon.2020.02.14.Steve.Buscemi.WEB.x264-XLF[TGx]',
  //   size: 618637331,
  //   type: Type.SERIES,
  //   imdbId: 'tt3444938'
  // };
  /* Not all seasons available so Date based episode */
  // const torrent = {
  //   infoHash: 'DCD5ACF85F4203FE14428A890528B2EDBD07B092',
  //   title: 'The Young And The Restless - S43 E10986 - 2016-08-12',
  //   size: 989777743,
  //   type: Type.SERIES,
  //   imdbId: 'tt0069658'
  // };
  // const torrent = {
  //   infoHash: 'C75FBDCD62EB882746A0E58B19BADD60DE14526B',
  //   title: 'Jimmy.Kimmel.2016.08.03.Hugh.Grant.480p.x264-mSD',
  //   size: 618637331,
  //   type: Type.SERIES,
  //   imdbId: 'tt0320037'
  // };
  /* With extras exceeding season episode count */
  // const torrent = {
  //   infoHash: '2af56a80357b61d839328b986d1165ea8395bbc0',
  //   title: 'Tim and Eric - Complete',
  //   type: Type.SERIES,
  //   imdbId: 'tt0912343'
  // };

  return parseTorrentFiles(torrent)
      .then((files) => console.log(files));
}

//addMissingEpisodes().then(() => console.log('Finished'));
//findAllFiles().then(() => console.log('Finished'));
//updateMovieCollections().then(() => console.log('Finished'));
//reapplyEpisodeDecomposing('83b61caa4191469a9c15ee851aff828184f9a78d', false).then(() => console.log('Finished'));
//reapplySeriesSeasonsSavedAsMovies().then(() => console.log('Finished'));
174 scraper/scrapers/1337x/1337x_api.js Normal file
@@ -0,0 +1,174 @@
const cheerio = require('cheerio');
const needle = require('needle');
const Sugar = require('sugar-date');
const decode = require('magnet-uri');

const defaultProxies = [
  'https://1337x.to'
];
const defaultTimeout = 10000;

const Categories = {
  MOVIE: 'Movies',
  TV: 'TV',
  ANIME: 'Anime',
  DOCUMENTARIES: 'Documentaries',
  APPS: 'Apps',
  GAMES: 'Games',
  MUSIC: 'Music',
  PORN: 'XXX',
  OTHER: 'Other',
};

function torrent(torrentId, config = {}, retries = 2) {
  if (!torrentId || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentId} query`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const slug = torrentId.startsWith('/torrent/') ? torrentId.replace('/torrent/', '') : torrentId;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${slug}`, config)))
      .then((body) => parseTorrentPage(body))
      .then((torrent) => ({ torrentId: slug, ...torrent }))
      .catch(() => torrent(slug, config, retries - 1));
}

function search(keyword, config = {}, retries = 2) {
  if (!keyword || retries === 0) {
    return Promise.reject(new Error(`Failed ${keyword} search`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/`, config)))
      .then((body) => parseTableBody(body))
      .catch(() => search(keyword, config, retries - 1));
}

function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;
  const category = config.category;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/cat/${category}/${page}/`, config)))
      .then((body) => parseTableBody(body))
      .catch(() => browse(config, retries - 1));
}

function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;

  return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
      .then((response) => {
        const body = response.body;
        if (!body) {
          throw new Error(`No body: ${requestUrl}`);
        } else if (body.includes('502: Bad gateway') ||
            body.includes('403 Forbidden') ||
            !(body.includes('1337x</title>'))) {
          throw new Error(`Invalid body contents: ${requestUrl}`);
        }
        return body;
      });
}

function parseTableBody(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }

    const torrents = [];

    $('.table > tbody > tr').each((i, element) => {
      const row = $(element);
      torrents.push({
        name: row.find('a').eq(1).text(),
        torrentId: row.find('a').eq(1).attr('href').replace('/torrent/', ''),
        seeders: parseInt(row.children('td.coll-2').text()),
        leechers: parseInt(row.children('td.coll-3').text()),
        size: parseSize(row.children('td.coll-4').text())
      });
    });

    resolve(torrents);
  });
}

function parseTorrentPage(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }

    const details = $('.torrent-detail-page');
    const magnetLink = details.find('a:contains(\'Magnet Download\')').attr('href');
    const imdbIdMatch = details.find('div[id=\'description\']').html().match(/imdb\.com\/title\/tt(\d+)/i);

    const torrent = {
      name: decode(magnetLink).name.replace(/\+/g, ' '),
      infoHash: decode(magnetLink).infoHash,
      magnetLink: magnetLink,
      seeders: parseInt(details.find('strong:contains(\'Seeders\')').next().text(), 10),
      leechers: parseInt(details.find('strong:contains(\'Leechers\')').next().text(), 10),
      category: details.find('strong:contains(\'Category\')').next().text(),
      language: details.find('strong:contains(\'Language\')').next().text(),
      size: parseSize(details.find('strong:contains(\'Total size\')').next().text()),
      uploadDate: Sugar.Date.create(details.find('strong:contains(\'Date uploaded\')').next().text()),
      imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`,
      files: details.find('div[id=\'files\']').first().find('li')
          .map((i, elem) => $(elem).text())
          .map((i, text) => ({
            fileIndex: i,
            name: text.match(/^(.+)\s\(.+\)$/)[1].replace(/^.+\//g, ''),
            path: text.match(/^(.+)\s\(.+\)$/)[1],
            size: parseSize(text.match(/^.+\s\((.+)\)$/)[1])
          })).get()
    };
    resolve(torrent);
  });
}
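
// e.g. parseSize('1.4 GB') ≈ 1503238553 bytes; text without a recognised unit is
// treated as plain bytes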
function parseSize(sizeText) {
  if (!sizeText) {
    return undefined;
  }
  let scale = 1;
  if (sizeText.includes('GB')) {
    scale = 1024 * 1024 * 1024;
  } else if (sizeText.includes('MB')) {
    scale = 1024 * 1024;
  } else if (sizeText.includes('KB')) {
    scale = 1024;
  }
  return Math.floor(parseFloat(sizeText) * scale);
}

function raceFirstSuccessful(promises) {
  return Promise.all(promises.map((p) => {
    // If a request fails, count that as a resolution so it will keep
    // waiting for other possible successes. If a request succeeds,
    // treat it as a rejection so Promise.all immediately bails out.
    return p.then(
        (val) => Promise.reject(val),
        (err) => Promise.resolve(err)
    );
  })).then(
      // If '.all' resolved, we've just got an array of errors.
      (errors) => Promise.reject(errors),
      // If '.all' rejected, we've got the result we wanted.
      (val) => Promise.resolve(val)
  );
}

module.exports = { torrent, search, browse, Categories };
89 scraper/scrapers/1337x/1337x_scraper.js Normal file
@@ -0,0 +1,89 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const leetx = require('./1337x_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
  createTorrentEntry,
  createSkipTorrentEntry,
  getStoredTorrentEntry,
  updateTorrentSeeders
} = require('../../lib/torrentEntries');

const NAME = '1337x';
const UNTIL_PAGE = 1;
const TYPE_MAPPING = typeMapping();

const limiter = new Bottleneck({ maxConcurrent: 40 });

async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  const latestTorrents = await getLatestTorrents();
  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return repository.updateProvider(lastScrape);
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}

async function getLatestTorrents() {
  const allowedCategories = [
    leetx.Categories.MOVIE,
    leetx.Categories.TV,
    leetx.Categories.ANIME,
    leetx.Categories.DOCUMENTARIES
  ];

  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
}

async function getLatestTorrentsForCategory(category, page = 1) {
  return leetx.browse(({ category: category, page: page }))
      .then(torrents => torrents.length && page < UNTIL_PAGE
          ? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
          : torrents)
      .catch(() => []);
}

async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return updateTorrentSeeders(record);
  }

  const torrentFound = await leetx.torrent(record.torrentId).catch(() => undefined);

  if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: torrentFound.infoHash,
    provider: NAME,
    torrentId: torrentFound.torrentId,
    title: torrentFound.name.replace(/\t|\s+/g, ' '),
    seeders: torrentFound.seeders,
    size: torrentFound.size,
    type: TYPE_MAPPING[torrentFound.category],
    uploadDate: torrentFound.uploadDate,
    imdbId: torrentFound.imdbId,
  };

  return createTorrentEntry(torrent);
}

function typeMapping() {
  const mapping = {};
  mapping[leetx.Categories.MOVIE] = Type.MOVIE;
  mapping[leetx.Categories.DOCUMENTARIES] = Type.MOVIE;
  mapping[leetx.Categories.TV] = Type.SERIES;
  mapping[leetx.Categories.ANIME] = Type.ANIME;
  return mapping;
}

module.exports = { scrape };
136 scraper/scrapers/horriblesubs/horriblesubs_api.js Normal file
@@ -0,0 +1,136 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');

const defaultUrl = 'https://horriblesubs.info';
const defaultTimeout = 10000;

function allShows(config = {}) {
  return _getContent('/shows', config)
      .then(($) => $('div[class="ind-show"]')
          .map((index, element) => $(element).children('a'))
          .map((index, element) => ({
            title: element.attr('title'),
            url: `${config.proxyUrl || defaultUrl}${element.attr('href')}`
          })).get());
}

async function showData(showInfo, config = {}) {
  const showEndpoint = (showInfo.url || showInfo).match(/\/show.+/)[0];
  const title = showInfo.title;
  const showId = await _getShowId(showEndpoint);
  const packEntries = await _getShowEntries(showId, title, 'batch', config);
  const singleEntries = await _getShowEntries(showId, title, 'show', config);

  return {
    title: title,
    url: showInfo.url || showInfo,
    showId: showId,
    singleEpisodes: singleEntries,
    packEpisodes: packEntries
  };
}

async function getLatestEntries(config = {}) {
  return _getAllLatestEntries(config)
      .then((entries) => Promise.all(entries.map((entry) => _findLatestEntry(entry, config))))
      .then((entries) => entries.filter((entry) => entry));
}

function _getContent(endpoint, config = {}) {
  const baseUrl = config.proxyUrl || defaultUrl;
  const timeout = config.timeout || defaultTimeout;
  const url = endpoint.startsWith('http')
      ? endpoint.replace(/https?:\/\/[^/]+/, baseUrl)
      : `${baseUrl}${endpoint}`;

  return needle('get', url, { open_timeout: timeout, follow: 2 })
      .then((response) => response.body)
      .then((body) => cheerio.load(body));
}

function _getShowId(showEndpoint) {
  return _getContent(showEndpoint)
      .then($ => $('div.entry-content').find('script').html().match(/var hs_showid = (\d+)/)[1]);
}

function _getShowEntries(animeId, animeTitle, type, config) {
  return _getAllEntries(animeId, type, config)
      .then((entries) => entries.filter((entry) => entry.title === animeTitle));
}

function _getAllEntries(animeId, type, config, page = 0, autoExtend = true) {
  const entriesEndpoint = `/api.php?method=getshows&type=${type}&showid=${animeId}&nextid=${page}`;
  return _getEntries(entriesEndpoint, config)
      .then((entries) => !autoExtend || !entries.length ? entries :
          _getAllEntries(animeId, type, config, page + 1)
              .then((nextEntries) => entries.concat(nextEntries)));
}

function _getEntries(endpoint, config) {
  return _getContent(endpoint, config)
      .then(($) => $('div[class="rls-info-container"]')
          .map((index, element) => ({
            title: $(element).find('a[class="rls-label"]').contents()
                .filter((i, el) => el.nodeType === 3).first().text().trim(),
            episode: $(element).find('a[class="rls-label"]').find('strong').text(),
            uploadDate: _parseDate($(element).find('a[class="rls-label"]').find('span[class="rls-date"]').text()),
            mirrors: $(element).find('div[class="rls-links-container"]').children()
                .map((indexLink, elementLink) => ({
                  resolution: $(elementLink).attr('id').match(/\d+p$/)[0],
                  magnetLink: $(elementLink).find('a[title="Magnet Link"]').attr('href'),
                  torrentLink: $(elementLink).find('a[title="Torrent Link"]').attr('href')
                })).get()
          })).get());
}
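
// the latest feed appears to serve 12 entries per page; a shorter page is
// treated as the last one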
|
||||
|
||||
function _getAllLatestEntries(config, page = 0) {
  const pageParam = page === 0 ? '' : `&nextid=${page}`;
  const entriesEndpoint = `/api.php?method=getlatest${pageParam}`;
  return _getContent(entriesEndpoint, config)
      .then(($) => $('li a')
          .map((index, element) => ({
            urlEndpoint: $(element).attr('href'),
            episode: $(element).find('strong').text()
          })).get())
      .then((entries) => entries.length < 12 // a full page holds 12 entries, so a short page is the last one
          ? entries
          : _getAllLatestEntries(config, page + 1)
              .then((nextEntries) => entries.concat(nextEntries)));
}

async function _findLatestEntry(entry, config) {
  const showId = await _getShowId(entry.urlEndpoint);
  let foundEntry;
  let page = 0;
  let reachedEnd = false;

  while (!foundEntry && !reachedEnd) {
    const allEntries = await _getAllEntries(showId, 'show', config, page, false);
    foundEntry = allEntries.filter((e) => e.episode === entry.episode)[0];
    page = page + 1;
    reachedEnd = allEntries.length === 0;
  }

  if (!foundEntry) {
    return;
  }
  return {
    title: foundEntry.title,
    url: entry.urlEndpoint,
    showId: showId,
    singleEpisodes: [foundEntry]
  };
}

function _parseDate(date) {
  if (date.match(/today/i)) {
    return moment().toDate();
  } else if (date.match(/yesterday/i)) {
    return moment().subtract(1, 'day').toDate();
  }
  return moment(date, 'MM/DD/YYYY').toDate();
}

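// Examples (illustrative): _parseDate('Today') resolves to the current date,
// _parseDate('Yesterday') to one day earlier, and anything else is parsed as
// MM/DD/YYYY, e.g. _parseDate('03/09/2020') is March 9th 2020.
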
module.exports = { allShows, showData, getLatestEntries, _getShowId };

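// Usage sketch (illustrative, not part of this file): fetch the catalog and
// resolve a single show's entries with the exports above.
// const horriblesubs = require('./horriblesubs_api.js');
// horriblesubs.allShows()
//     .then((shows) => horriblesubs.showData(shows[0]))
//     .then((data) => console.log(data.title, data.singleEpisodes.length));
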
5397
scraper/scrapers/horriblesubs/horriblesubs_mapping.json
Normal file
File diff suppressed because it is too large
189
scraper/scrapers/horriblesubs/horriblesubs_scraper.js
Normal file
@@ -0,0 +1,189 @@
const fs = require('fs');
const moment = require('moment');
const Bottleneck = require('bottleneck');
const decode = require('magnet-uri');
const horriblesubs = require('./horriblesubs_api.js');
const repository = require('../../lib/repository');
const { Type } = require('../../lib/types');
const { updateCurrentSeeders } = require('../../lib/torrent');
const { parseTorrentFiles } = require('../../lib/torrentFiles');
const { getMetadata, getKitsuId } = require('../../lib/metadata');
const showMappings = require('./horriblesubs_mapping.json');

const NAME = 'HorribleSubs';
const NEXT_FULL_SCRAPE_OFFSET = 3 * 24 * 60 * 60; // 3 days in seconds

const limiter = new Bottleneck({ maxConcurrent: 5 });
const entryLimiter = new Bottleneck({ maxConcurrent: 10 });

async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  const lastScraped = lastScrape.lastScraped && moment.unix(lastScrape.lastScraped);

  if (!lastScraped || lastScraped.add(NEXT_FULL_SCRAPE_OFFSET, 'seconds') < scrapeStart) {
    console.log(`[${scrapeStart}] scraping all ${NAME} shows...`);
    return _scrapeAllShows()
        .then(() => {
          lastScrape.lastScraped = scrapeStart;
          return repository.updateProvider(lastScrape);
        })
        .then(() => console.log(`[${moment()}] finished scraping all ${NAME} shows`));
  } else {
    console.log(`[${scrapeStart}] scraping latest ${NAME} entries...`);
    return _scrapeLatestEntries()
        .then(() => console.log(`[${moment()}] finished scraping latest ${NAME} entries`));
  }
}

async function _scrapeLatestEntries() {
  const latestEntries = await horriblesubs.getLatestEntries();

  return Promise.all(latestEntries
      .map((entryData) => limiter.schedule(() => _parseShowData(entryData)
          .catch((err) => console.log(err)))));
}

async function _scrapeAllShows() {
  const shows = await horriblesubs.allShows();

  return Promise.all(shows
      .map((show) => limiter.schedule(() => horriblesubs.showData(show)
          .then((showData) => _parseShowData(showData))
          .catch((err) => console.log(err)))));
}

// Maintenance helper (not exported): samples shows (currently only the first
// one) and compares search-based kitsu ids against the static mapping.
async function compareSearchKitsuIds() {
  console.log(`${NAME}: initiating kitsu compare...`);
  const shows = await horriblesubs.allShows()
      .then((shows) => Promise.all(shows.slice(0, 1).map((show) => limiter.schedule(() => enrichShow(show)))));

  const incorrect = shows.filter(
      (show) => showMappings[show.title] && showMappings[show.title].kitsu_id !== show.kitsu_id);
  const incorrectRatio = incorrect.length / shows.length;
  console.log(incorrect);
  console.log(`Ratio: ${incorrectRatio}`);
}

// Maintenance helper (not exported): extends the static mapping file with
// kitsu ids for any shows that are not mapped yet.
async function initMapping() {
  console.log(`${NAME}: initiating kitsu mapping...`);
  const shows = await horriblesubs.allShows()
      .then((shows) => shows.filter((show) => !showMappings[show.title]))
      .then((shows) => Promise.all(shows.map((show) => limiter.schedule(() => enrichShow(show)))))
      .then((shows) => shows.reduce((map, show) => (map[show.title] = show, map), showMappings));

  fs.writeFile("./scrapers/horriblesubs/horriblesubs_mapping.json", JSON.stringify(shows), 'utf8', function (err) {
    if (err) {
      console.log("An error occurred while writing JSON Object to File.");
    } else {
      console.log(`${NAME}: finished kitsu mapping`);
    }
  });
}

async function enrichShow(show) {
  console.log(`${NAME}: getting show info for ${show.title}...`);
  const showId = await horriblesubs._getShowId(show.url)
      .catch(() => show.title);
  const metadata = await getKitsuId({ title: show.title })
      .then((kitsuId) => getMetadata(kitsuId))
      .catch((error) => {
        console.log(`Failed getting kitsu meta: ${error.message}`);
        return {};
      });

  return {
    showId: showId,
    kitsu_id: metadata.kitsuId,
    ...show,
    kitsuTitle: metadata.title,
    imdb_id: metadata.imdbId
  };
}

async function _parseShowData(showData) {
  console.log(`${NAME}: scraping ${showData.title} data...`);
  const showMapping = showMappings[showData.title];
  const kitsuId = showMapping && showMapping.kitsu_id;
  if (!showMapping) {
    throw new Error(`No kitsu mapping found for ${showData.title}`);
  }
  if (!kitsuId) {
    throw new Error(`No kitsuId found for ${showData.title}`);
  }

  // sometimes a horriblesubs entry contains multiple seasons, so it needs to be
  // split into separate kitsu season entries based on absolute episode number
  const kitsuIdsMapping = Array.isArray(kitsuId) && await Promise.all(kitsuId.map(kitsuId => getMetadata(kitsuId)))
      .then((metas) => metas.reduce((map, meta) => {
        const epOffset = Object.keys(map).length;
        [...Array(meta.totalCount).keys()]
            .map(ep => ep + 1)
            .forEach(ep => map[ep + epOffset] = { kitsuId: meta.kitsuId, episode: ep, title: meta.title });
        return map;
      }, {})) || {};
  const formatTitle = (episodeInfo, mirror) => {
    const mapping = kitsuIdsMapping[episodeInfo.episode.replace(/^0+/, '')];
    if (mapping) {
      return `${mapping.title} - ${mapping.episode} [${mirror.resolution}]`;
    }
    return `${episodeInfo.title} - ${episodeInfo.episode} [${mirror.resolution}]`;
  };
  const getKitsuId = inputEpisode => {
    const episodeString = inputEpisode.includes('-') && inputEpisode.split('-')[0] || inputEpisode;
    const episode = parseInt(episodeString, 10);
    return kitsuIdsMapping[episode] && kitsuIdsMapping[episode].kitsuId || kitsuId;
  };

  return Promise.all([].concat(showData.singleEpisodes).concat(showData.packEpisodes)
      .map((episodeInfo) => episodeInfo.mirrors
          .map((mirror) => ({
            provider: NAME,
            ...mirror,
            infoHash: decode(mirror.magnetLink).infoHash,
            trackers: decode(mirror.magnetLink).tr.join(','),
            title: formatTitle(episodeInfo, mirror),
            size: 300000000, // placeholder size, as magnet links carry no size info
            type: Type.ANIME,
            kitsuId: getKitsuId(episodeInfo.episode),
            uploadDate: episodeInfo.uploadDate,
          })))
      .reduce((a, b) => a.concat(b), [])
      .map((incompleteTorrent) => entryLimiter.schedule(() => checkIfExists(incompleteTorrent)
          .then((torrent) => torrent && updateCurrentSeeders(torrent))
          .then((torrent) => torrent && parseTorrentFiles(torrent)
              .then((files) => verifyFiles(torrent, files))
              .then((files) => repository.createTorrent(torrent)
                  .then(() => files.forEach(file => repository.createFile(file)))
                  .then(() => console.log(`Created entry for ${torrent.title}`)))))))
      .then(() => console.log(`${NAME}: finished scraping ${showData.title} data`));
}

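// Worked example (illustrative values): if a HorribleSubs entry maps to two
// kitsu seasons of 12 episodes each, kitsuIdsMapping becomes
//   { 1: { episode: 1, ...season1 }, ..., 12: { episode: 12, ...season1 },
//     13: { episode: 1, ...season2 }, ..., 24: { episode: 12, ...season2 } }
// so absolute episode "13" is re-titled as season 2 episode 1.
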
async function verifyFiles(torrent, files) {
  if (files && files.length) {
    const existingFiles = await repository.getFiles({ infoHash: files[0].infoHash })
        .then((existing) => existing.reduce((map, file) => (map[file.fileIndex] = file, map), {}))
        .catch(() => undefined);
    if (existingFiles && Object.keys(existingFiles).length) {
      // reuse ids and sizes of files already stored for this infoHash
      return files
          .map(file => ({
            ...file,
            id: existingFiles[file.fileIndex] && existingFiles[file.fileIndex].id,
            size: existingFiles[file.fileIndex] && existingFiles[file.fileIndex].size || file.size
          }));
    }
    return files;
  }
  return Promise.reject(`No video files found for: ${torrent.title}`);
}

async function checkIfExists(torrent) {
  const existingTorrent = await repository.getTorrent(torrent).catch(() => undefined);
  if (!existingTorrent) {
    return torrent; // no torrent exists yet
  } else if (existingTorrent.provider === NAME) {
    return undefined; // torrent by this provider already exists
  }
  return { ...torrent, size: existingTorrent.size, seeders: existingTorrent.seeders };
}

module.exports = { scrape };

187
scraper/scrapers/kickass/kickass_api.js
Normal file
@@ -0,0 +1,187 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
const decode = require('magnet-uri');

const defaultProxies = [
  'https://katcr.co'
];
const defaultTimeout = 10000;

const Categories = {
  MOVIE: 'movies',
  TV: 'tv',
  ANIME: 'anime',
  APPS: 'applications',
  GAMES: 'games',
  MUSIC: 'music',
  BOOKS: 'books',
  PORN: 'xxx',
  OTHER: 'other',
};

function torrent(torrentId, config = {}, retries = 2) {
  if (!torrentId || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentId} search`));
  }
  const proxyList = config.proxyList || defaultProxies;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
      .then((body) => parseTorrentPage(body))
      .then((torrent) => ({ torrentId, ...torrent }))
      .catch((err) => torrent(torrentId, config, retries - 1));
}

function search(keyword, config = {}, retries = 2) {
  if (!keyword || retries === 0) {
    return Promise.reject(new Error(`Failed ${keyword} search`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;
  const category = config.category;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
      .then((body) => parseTableBody(body))
      .catch((err) => search(keyword, config, retries - 1));
}

function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 1;
  const category = config.category;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/category/${category}/page/${page}`, config)))
      .then((body) => parseTableBody(body))
      .catch((err) => browse(config, retries - 1));
}

function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;

  return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
      .then((response) => {
        const body = response.body;
        if (!body) {
          throw new Error(`No body: ${requestUrl}`);
        } else if (body.includes('Access Denied')) {
          console.log(`Access Denied: ${requestUrl}`);
          throw new Error(`Access Denied: ${requestUrl}`);
        } else if (body.includes('502: Bad gateway') ||
            body.includes('403 Forbidden') ||
            body.includes('Origin DNS error') ||
            !body.includes('Kickass Torrents</title>')) {
          throw new Error(`Invalid body contents: ${requestUrl}`);
        }
        return body;
      });
}

function parseTableBody(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }

    const torrents = [];

    $('.table > tbody > tr').each((i, element) => {
      const row = $(element);
      torrents.push({
        torrentId: row.find('a[class="torrents_table__torrent_title"]').first().attr('href').replace('/torrent/', ''),
        name: row.find('a[class="torrents_table__torrent_title"]').first().children('b').text(),
        category: row.find('span[class="torrents_table__upload_info"]').first().children('a').first().attr('href')
            .match(/category\/([^\/]+)/)[1],
        seeders: parseInt(row.find('td[data-title="Seed"]').first().text()),
        leechers: parseInt(row.find('td[data-title="Leech"]').first().text()),
        size: parseSize(row.find('td[data-title="Size"]').first().text()),
        uploadDate: moment(row.find('td[data-title="Age"]').first().attr('title')).toDate()
      });
    });

    resolve(torrents);
  });
}

function parseTorrentPage(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }
    const content = $('div[class="col"]').first();
    const info = content.find('div[class="torrent_stats"]').parent();
    const description = content.find('div[id="main"]');
    const magnetLink = info.find('a[title="Download verified Magnet"]').attr('href');
    const imdbIdMatch = description.html().match(/imdb\.com\/title\/tt(\d+)/i);

    const torrent = {
      name: info.find('h1').first().text(),
      infoHash: decode(magnetLink).infoHash,
      magnetLink: magnetLink,
      seeders: parseInt(info.find('span[class="torrent_stats__seed_count mr-2"]').first().text().match(/\d+/)[0], 10),
      leechers: parseInt(info.find('span[class="torrent_stats__leech_count mr-2"]').first().text().match(/\d+/)[0], 10),
      category: info.find('small').first().children('a').first().attr('href').match(/\/category\/([^\/]+)/)[1],
      language: description.find('span:contains(\'Audio\')').next().children().eq(0).text(),
      size: parseSize(description.find('ul[class="file_list"]').first().find('li').first().contents().eq(2).text()
          .match(/\(Size: (.+)\)/)[1]),
      uploadDate: moment(info.find('time').first().text()).toDate(),
      imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`,
      files: content.find('ul[class="file_list"]').first().find('li > ul > li[class="file_list__file"]')
          .map((i, elem) => $(elem))
          .map((i, ele) => ({
            fileIndex: i,
            name: ele.find('span > ul > li').contents().eq(1).text().trim().replace(/^.+\//g, ''),
            path: ele.find('span > ul > li').contents().eq(1).text().trim(),
            size: parseSize(ele.contents().eq(2).text())
          })).get()
    };
    if (torrent.files.length >= 50) {
      // a max of 50 files are displayed on the page
      delete torrent.files;
    }
    resolve(torrent);
  });
}

function parseSize(sizeText) {
  if (!sizeText) {
    return undefined;
  }
  let scale = 1;
  if (sizeText.includes('GB')) {
    scale = 1024 * 1024 * 1024
  } else if (sizeText.includes('MB')) {
    scale = 1024 * 1024;
  } else if (sizeText.includes('KB') || sizeText.includes('kB')) {
    scale = 1024;
  }
  return Math.floor(parseFloat(sizeText.replace(/[',]/g, '')) * scale);
}

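// Examples (illustrative): parseSize('2 GB') === 2147483648,
// parseSize('700 MB') === 734003200, parseSize(undefined) === undefined.
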
function raceFirstSuccessful(promises) {
  return Promise.all(promises.map((p) => {
    // If a request fails, count that as a resolution so it will keep
    // waiting for other possible successes. If a request succeeds,
    // treat it as a rejection so Promise.all immediately bails out.
    return p.then(
        (val) => Promise.reject(val),
        (err) => Promise.resolve(err)
    );
  })).then(
      // If '.all' resolved, we've just got an array of errors.
      (errors) => Promise.reject(errors),
      // If '.all' rejected, we've got the result we wanted.
      (val) => Promise.resolve(val)
  );
}

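// Usage sketch (illustrative): resolves with the first proxy that answers
// successfully, and rejects only if every proxy fails.
// raceFirstSuccessful(proxies.map((p) => singleRequest(`${p}/torrent/123`)))
//     .then((body) => parseTorrentPage(body));
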
module.exports = { torrent, search, browse, Categories };

98
scraper/scrapers/kickass/kickass_dump_scraper.js
Normal file
@@ -0,0 +1,98 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const LineByLineReader = require('line-by-line');
const fs = require('fs');
const { Type } = require('../../lib/types');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');

const NAME = 'KickassTorrents';
const CSV_FILE_PATH = '/tmp/kickass.csv';

const limiter = new Bottleneck({ maxConcurrent: 40 });

async function scrape() {
  console.log(`starting to scrape KAT dump...`);

  let entriesProcessed = 0;
  const lr = new LineByLineReader(CSV_FILE_PATH);
  lr.on('line', (line) => {
    if (entriesProcessed % 1000 === 0) {
      console.log(`Processed ${entriesProcessed} entries`);
    }
    // rows are pipe-delimited; quoted fields may contain the delimiter
    const row = line.match(/(?<=^|\|)(".*"|[^|]+)(?=\||$)/g);
    if (row.length !== 11) {
      console.log(`Invalid row: ${line}`);
      return;
    }
    const torrent = {
      infoHash: row[0].toLowerCase(),
      title: row[1]
          .replace(/^"|"$/g, '')
          .replace(/&amp;/g, '&')
          .replace(/&\w{2,6};/g, ' ')
          .replace(/\s+/g, ' ')
          .trim(),
      category: row[2],
      size: parseInt(row[5], 10),
      seeders: parseInt(row[8], 10),
      uploadDate: moment.unix(parseInt(row[10], 10)).toDate(),
    };

    if (!limiter.empty()) {
      lr.pause()
    }

    limiter.schedule(() => processTorrentRecord(torrent)
        .catch((error) => console.log(`failed ${torrent.title} due to: ${error}`)))
        .then(() => limiter.empty())
        .then((empty) => empty && lr.resume())
        .then(() => entriesProcessed++);
  });
  lr.on('error', (err) => {
    console.log(err);
  });
  lr.on('end', () => {
    fs.unlink(CSV_FILE_PATH, (error) => error && console.warn(error));
    console.log(`finished scraping KAT dump!`);
  });
}

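// Row layout as consumed above (hypothetical example, 11 pipe-delimited fields):
// index 0 infoHash | 1 "title" | 2 category | ... | 5 size | ... | 8 seeders | ... | 10 unix timestamp
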
const categoryMapping = {
  "Movies": Type.MOVIE,
  "TV": Type.SERIES,
  "Anime": Type.ANIME
};

async function processTorrentRecord(record) {
  if (!categoryMapping[record.category] || record.seeders === 0) {
    return createSkipTorrentEntry(record);
  }
  if (await getStoredTorrentEntry(record)) {
    return;
  }

  const torrentFound = await findTorrent(record).catch(() => undefined);

  if (!torrentFound) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: record.infoHash,
    provider: NAME,
    title: torrentFound.name,
    size: record.size,
    type: categoryMapping[record.category],
    imdbId: torrentFound.imdbId,
    uploadDate: record.uploadDate,
    seeders: torrentFound.seeders,
  };

  return createTorrentEntry(torrent);
}

// Stub: resolving a KAT record to a live torrent page is not implemented yet,
// so every record that isn't already stored ends up as a skip entry.
async function findTorrent(record) {
  return Promise.reject("not found");
}

module.exports = { scrape };

87
scraper/scrapers/kickass/kickass_scraper.js
Normal file
@@ -0,0 +1,87 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const kickass = require('./kickass_api');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
  createTorrentEntry,
  createSkipTorrentEntry,
  getStoredTorrentEntry,
  updateTorrentSeeders
} = require('../../lib/torrentEntries');

const NAME = 'KickassTorrents';
const UNTIL_PAGE = 1;
const TYPE_MAPPING = typeMapping();

const limiter = new Bottleneck({ maxConcurrent: 40 });

async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  const latestTorrents = await getLatestTorrents();
  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return repository.updateProvider(lastScrape);
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}

async function getLatestTorrents() {
  const allowedCategories = [
    kickass.Categories.MOVIE,
    kickass.Categories.TV,
    kickass.Categories.ANIME,
  ];

  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
}

async function getLatestTorrentsForCategory(category, page = 1) {
  return kickass.browse({ category, page })
      .then(torrents => torrents.length && page < UNTIL_PAGE
          ? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
          : torrents)
      .catch(() => []);
}

async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return updateTorrentSeeders(record);
  }

  const torrentFound = await kickass.torrent(record.torrentId).catch(() => undefined);

  if (!torrentFound || !TYPE_MAPPING[torrentFound.category]) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: torrentFound.infoHash,
    provider: NAME,
    torrentId: torrentFound.torrentId,
    title: torrentFound.name.replace(/\t|\s+/g, ' '),
    size: torrentFound.size,
    type: TYPE_MAPPING[torrentFound.category],
    imdbId: torrentFound.imdbId,
    uploadDate: torrentFound.uploadDate,
    seeders: torrentFound.seeders,
  };

  return createTorrentEntry(torrent);
}

function typeMapping() {
  const mapping = {};
  mapping[kickass.Categories.MOVIE] = Type.MOVIE;
  mapping[kickass.Categories.TV] = Type.SERIES;
  mapping[kickass.Categories.ANIME] = Type.ANIME;
  return mapping;
}

module.exports = { scrape };

76
scraper/scrapers/rarbg/rarbg_dump_scraper.js
Normal file
@@ -0,0 +1,76 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const rarbg = require('rarbg-api');
const decode = require('magnet-uri');
const { Type } = require('../../lib/types');
const {
  createTorrentEntry,
  getStoredTorrentEntry,
  updateTorrentSeeders
} = require('../../lib/torrentEntries');

const NAME = 'RARBG';

// the RARBG api rate-limits clients to roughly one request every two seconds,
// so api calls are serialized with headroom while db writes run in parallel
const limiter = new Bottleneck({ maxConcurrent: 1, minTime: 2500 });
const entryLimiter = new Bottleneck({ maxConcurrent: 40 });

async function scrape() {
  console.log(`[${moment()}] starting ${NAME} dump scrape...`);
  const movieImdbIds = require('./rargb_movie_imdb_ids_2020-03-09.json');
  const seriesImdbIds = require('./rargb_series_imdb_ids_2020-03-09.json');
  const allImdbIds = [].concat(movieImdbIds).concat(seriesImdbIds);

  return Promise.all(allImdbIds.map(imdbId => limiter.schedule(() => getTorrentsForImdbId(imdbId))
      .then(torrents => Promise.all(torrents.map(t => entryLimiter.schedule(() => processTorrentRecord(t)))))))
      .then(() => console.log(`[${moment()}] finished ${NAME} dump scrape`));
}

async function getTorrentsForImdbId(imdbId) {
  return rarbg.search(imdbId, { limit: 100, sort: 'seeders', format: 'json_extended', ranked: 0 }, 'imdb')
      .then(torrents => torrents.map(torrent => ({
        name: torrent.title,
        infoHash: decode(torrent.download).infoHash,
        magnetLink: torrent.download,
        seeders: torrent.seeders,
        leechers: torrent.leechers,
        category: torrent.category,
        size: torrent.size,
        uploadDate: new Date(torrent.pubdate),
        imdbId: torrent.episode_info && torrent.episode_info.imdb
      })))
      .catch((err) => []);
}

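// Illustrative json_extended item as consumed above (hypothetical values):
// { title: 'Some.Movie.2019.1080p.WEB-DL.x264', download: 'magnet:?xt=urn:btih:...',
//   seeders: 12, leechers: 3, category: 'Movies/x264/1080', size: 2147483648,
//   pubdate: '2020-03-09 10:00:00 +0000', episode_info: { imdb: 'tt0123456' } }
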
async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return updateTorrentSeeders(record);
  }

  const torrent = {
    provider: NAME,
    infoHash: record.infoHash,
    title: record.name,
    type: getType(record.category),
    seeders: record.seeders,
    size: record.size,
    uploadDate: record.uploadDate,
    imdbId: record.imdbId
  };

  return createTorrentEntry(torrent);
}

const seriesCategories = [
  'TV Episodes',
  'Movies/TV-UHD-episodes',
  'TV HD Episodes',
];

function getType(category) {
  if (seriesCategories.includes(category)) {
    return Type.SERIES;
  }
  return Type.MOVIE;
}

module.exports = { scrape };

103
scraper/scrapers/rarbg/rarbg_scraper.js
Normal file
@@ -0,0 +1,103 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const rarbg = require('rarbg-api');
const decode = require('magnet-uri');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
  createTorrentEntry,
  getStoredTorrentEntry,
  updateTorrentSeeders
} = require('../../lib/torrentEntries');

const NAME = 'RARBG';

// api calls are serialized to respect RARBG's rate limit (roughly 1 request per 2s)
const limiter = new Bottleneck({ maxConcurrent: 1, minTime: 2500 });
const entryLimiter = new Bottleneck({ maxConcurrent: 40 });

async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  const latestTorrents = await getLatestTorrents();
  return Promise.all(latestTorrents.map(torrent => entryLimiter.schedule(() => processTorrentRecord(torrent))))
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return repository.updateProvider(lastScrape);
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}

async function getLatestTorrents() {
  const allowedCategories = [
    rarbg.CATEGORY['4K_MOVIES_X264_4k'],
    rarbg.CATEGORY['4K_X265_4k'],
    rarbg.CATEGORY['4k_X264_4k_HDR'],
    rarbg.CATEGORY.MOVIES_XVID,
    rarbg.CATEGORY.MOVIES_XVID_720P,
    rarbg.CATEGORY.MOVIES_X264,
    rarbg.CATEGORY.MOVIES_X264_1080P,
    rarbg.CATEGORY.MOVIES_X264_720P,
    rarbg.CATEGORY.MOVIES_X264_3D,
    rarbg.CATEGORY.MOVIES_FULL_BD,
    rarbg.CATEGORY.MOVIES_BD_REMUX,
    rarbg.CATEGORY.TV_EPISODES,
    rarbg.CATEGORY.TV_UHD_EPISODES,
    rarbg.CATEGORY.TV_HD_EPISODES
  ];

  return Promise.all(allowedCategories.map(category => limiter.schedule(() => getLatestTorrentsForCategory(category))))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
}

async function getLatestTorrentsForCategory(category) {
  return rarbg.list({ category: category, limit: 100, sort: 'last', format: 'json_extended', ranked: 0 })
      .then(torrents => torrents.map(torrent => ({
        name: torrent.title,
        infoHash: decode(torrent.download).infoHash,
        magnetLink: torrent.download,
        seeders: torrent.seeders,
        leechers: torrent.leechers,
        category: torrent.category,
        size: torrent.size,
        uploadDate: new Date(torrent.pubdate),
        imdbId: torrent.episode_info && torrent.episode_info.imdb
      })))
      .catch((err) => []);
}

async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return updateTorrentSeeders(record);
  }

  const torrent = {
    provider: NAME,
    infoHash: record.infoHash,
    title: record.name,
    type: getType(record.category),
    seeders: record.seeders,
    size: record.size,
    uploadDate: record.uploadDate,
    imdbId: record.imdbId
  };

  return createTorrentEntry(torrent);
}

const seriesCategories = [
  'TV Episodes',
  'Movies/TV-UHD-episodes',
  'TV HD Episodes',
];

function getType(category) {
  if (seriesCategories.includes(category)) {
    return Type.SERIES;
  }
  return Type.MOVIE;
}

module.exports = { scrape };

41038
scraper/scrapers/rarbg/rargb_movie_imdb_ids_2020-03-09.json
Normal file
File diff suppressed because it is too large
6081
scraper/scrapers/rarbg/rargb_series_imdb_ids_2020-03-09.json
Normal file
File diff suppressed because it is too large
250
scraper/scrapers/thepiratebay/thepiratebay_api.js
Normal file
@@ -0,0 +1,250 @@
const cheerio = require('cheerio');
const needle = require('needle');
const moment = require('moment');
const decode = require('magnet-uri');

const defaultProxies = [
  'https://thepiratebay.org',
  'https://piratebays.icu',
  'https://piratebays.cool',
  'https://piratebays.life'
];
const dumpUrl = '/static/dump/csv/';
const defaultTimeout = 10000;

const Categories = {
  AUDIO: {
    ALL: 100,
    MUSIC: 101,
    AUDIO_BOOKS: 102,
    SOUND_CLIPS: 103,
    FLAC: 104,
    OTHER: 199
  },
  VIDEO: {
    ALL: 200,
    MOVIES: 201,
    MOVIES_DVDR: 202,
    MUSIC_VIDEOS: 203,
    MOVIE_CLIPS: 204,
    TV_SHOWS: 205,
    HANDHELD: 206,
    MOVIES_HD: 207,
    TV_SHOWS_HD: 208,
    MOVIES_3D: 209,
    OTHER: 299
  },
  APPS: {
    ALL: 300,
    WINDOWS: 301,
    MAC: 302,
    UNIX: 303,
    HANDHELD: 304,
    IOS: 305,
    ANDROID: 306,
    OTHER_OS: 399
  },
  GAMES: {
    ALL: 400,
    PC: 401,
    MAC: 402,
    PSx: 403,
    XBOX360: 404,
    Wii: 405,
    HANDHELD: 406,
    IOS: 407,
    ANDROID: 408,
    OTHER: 499
  },
  PORN: {
    ALL: 500,
    MOVIES: 501,
    MOVIES_DVDR: 502,
    PICTURES: 503,
    GAMES: 504,
    MOVIES_HD: 505,
    MOVIE_CLIPS: 506,
    OTHER: 599
  },
  OTHER: {
    ALL: 600,
    E_BOOKS: 601,
    COMICS: 602,
    PICTURES: 603,
    COVERS: 604,
    PHYSIBLES: 605,
    OTHER: 699
  }
};

function torrent(torrentId, config = {}, retries = 2) {
  if (!torrentId || retries === 0) {
    return Promise.reject(new Error(`Failed ${torrentId} search`));
  }
  const proxyList = config.proxyList || defaultProxies;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/torrent/${torrentId}`, config)))
      .then((body) => parseTorrentPage(body))
      .then((torrent) => ({ torrentId, ...torrent }))
      .catch((err) => torrent(torrentId, config, retries - 1));
}

function search(keyword, config = {}, retries = 2) {
  if (!keyword || retries === 0) {
    return Promise.reject(new Error(`Failed ${keyword} search`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 0;
  const category = config.category || 0;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/search/${keyword}/${page}/99/${category}`, config)))
      .then((body) => parseBody(body))
      .catch((err) => search(keyword, config, retries - 1));
}

function browse(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed browse request`));
  }
  const proxyList = config.proxyList || defaultProxies;
  const page = config.page || 0;
  const category = config.category || 0;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}/browse/${category}/${page}`, config)))
      .then((body) => parseBody(body))
      .catch((err) => browse(config, retries - 1));
}

function dumps(config = {}, retries = 2) {
  if (retries === 0) {
    return Promise.reject(new Error(`Failed dump search`));
  }
  const proxyList = config.proxyList || defaultProxies;

  return raceFirstSuccessful(proxyList
      .map((proxyUrl) => singleRequest(`${proxyUrl}${dumpUrl}`, config)
          .then((body) => body.match(/(<a href="[^"]+">[^<]+<\/a>.+\d)/g)
              .map((group) => ({
                url: `${proxyUrl}${dumpUrl}` + group.match(/<a href="([^"]+)">/)[1],
                updatedAt: moment(group.match(/\s+([\w-]+\s+[\d:]+)\s+\d+$/)[1], 'DD-MMM-YYYY HH:mm').toDate()
              })))))
      .catch(() => dumps(config, retries - 1));
}

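// Example of a resolved dumps() entry (illustrative values only):
// { url: 'https://thepiratebay.org/static/dump/csv/torrent_dump_full.csv.gz',
//   updatedAt: <Date parsed from the directory listing, e.g. 09-Mar-2020 03:00> }
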
function singleRequest(requestUrl, config = {}) {
  const timeout = config.timeout || defaultTimeout;

  return needle('get', requestUrl, { open_timeout: timeout, follow: 2 })
      .then((response) => {
        const body = response.body;
        if (!body) {
          throw new Error(`No body: ${requestUrl}`);
        } else if (body.includes('Access Denied') && !body.includes('<title>The Pirate Bay')) {
          console.log(`Access Denied: ${requestUrl}`);
          throw new Error(`Access Denied: ${requestUrl}`);
        } else if (body.includes('502: Bad gateway') ||
            body.includes('403 Forbidden') ||
            body.includes('Database maintenance') ||
            body.includes('Origin DNS error') ||
            !(body.includes('<title>The Pirate Bay') || body.includes('TPB</title>') || body.includes(dumpUrl))) {
          throw new Error(`Invalid body contents: ${requestUrl}`);
        }
        return body;
      });
}

function parseBody(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }

    const torrents = [];

    $('table[id=\'searchResult\'] tr').each(function () {
      const name = $(this).find('.detLink').text();
      const sizeMatcher = $(this).find('.detDesc').text().match(/(?:,\s?Size\s)(.+),/);
      if (!name || !sizeMatcher) {
        return;
      }
      torrents.push({
        torrentId: $(this).find('.detLink').attr('href').match(/torrent\/([^/]+)/)[1],
        name: name,
        seeders: parseInt($(this).find('td[align=\'right\']').eq(0).text(), 10),
        leechers: parseInt($(this).find('td[align=\'right\']').eq(1).text(), 10),
        magnetLink: $(this).find('a[title=\'Download this torrent using magnet\']').attr('href'),
        category: parseInt($(this).find('a[title=\'More from this category\']').eq(0).attr('href').match(/\d+$/)[0],
            10),
        subcategory: parseInt($(this).find('a[title=\'More from this category\']').eq(1).attr('href').match(/\d+$/)[0],
            10),
        size: parseSize(sizeMatcher[1])
      });
    });
    resolve(torrents);
  });
}

function parseTorrentPage(body) {
  return new Promise((resolve, reject) => {
    const $ = cheerio.load(body);

    if (!$) {
      return reject(new Error('Failed loading body'));
    }
    const details = $('div[id=\'details\']');
    const col1 = details.find('dl[class=\'col1\']');
    const imdbIdMatch = col1.html().match(/imdb\.com\/title\/tt(\d+)/i);

    const torrent = {
      name: $('div[id=\'title\']').text().trim(),
      seeders: parseInt(details.find('dt:contains(\'Seeders:\')').next().text(), 10),
      leechers: parseInt(details.find('dt:contains(\'Leechers:\')').next().text(), 10),
      magnetLink: details.find('a[title=\'Get this torrent\']').attr('href'),
      infoHash: decode(details.find('a[title=\'Get this torrent\']').attr('href')).infoHash,
      category: Categories.VIDEO.ALL,
      subcategory: parseInt(col1.find('a[title=\'More from this category\']').eq(0).attr('href').match(/\d+$/)[0], 10),
      size: parseSize(details.find('dt:contains(\'Size:\')').next().text().match(/(\d+)(?:.?Bytes)/)[1]),
      uploadDate: new Date(details.find('dt:contains(\'Uploaded:\')').next().text()),
      imdbId: imdbIdMatch && `tt${imdbIdMatch[1].padStart(7, '0')}`
    };
    resolve(torrent);
  });
}

function parseSize(sizeText) {
  if (!sizeText) {
    return undefined;
  }
  let scale = 1;
  if (sizeText.includes('GiB')) {
    scale = 1024 * 1024 * 1024
  } else if (sizeText.includes('MiB')) {
    scale = 1024 * 1024;
  } else if (sizeText.includes('KiB')) {
    scale = 1024;
  }
  return Math.floor(parseFloat(sizeText) * scale);
}

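// Examples (illustrative): parseSize('2 GiB') === 2147483648,
// parseSize('700 MiB') === 734003200, parseSize('0.99 KiB') === 1013.
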
function raceFirstSuccessful(promises) {
  return Promise.all(promises.map((p) => {
    // If a request fails, count that as a resolution so it will keep
    // waiting for other possible successes. If a request succeeds,
    // treat it as a rejection so Promise.all immediately bails out.
    return p.then(
        (val) => Promise.reject(val),
        (err) => Promise.resolve(err)
    );
  })).then(
      // If '.all' resolved, we've just got an array of errors.
      (errors) => Promise.reject(errors),
      // If '.all' rejected, we've got the result we wanted.
      (val) => Promise.resolve(val)
  );
}

module.exports = { torrent, search, browse, dumps, Categories };

181
scraper/scrapers/thepiratebay/thepiratebay_dump_scraper.js
Normal file
@@ -0,0 +1,181 @@
const moment = require('moment');
const needle = require('needle');
const Bottleneck = require('bottleneck');
const { ungzip } = require('node-gzip');
const LineByLineReader = require('line-by-line');
const fs = require('fs');
const thepiratebay = require('./thepiratebay_api.js');
const bing = require('nodejs-bing');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');

const NAME = 'ThePirateBay';
const CSV_FILE_PATH = '/tmp/tpb_dump.csv';

const limiter = new Bottleneck({ maxConcurrent: 40 });

async function scrape() {
  const lastScraped = await repository.getProvider({ name: NAME });
  // hardcoded placeholder dump timestamp; the commented-out code below fetches
  // the real latest dump from the source instead
  const lastDump = { updatedAt: 2147000000 };
  //const checkPoint = moment('2016-06-17 00:00:00', 'YYYY-MMM-DD HH:mm:ss').toDate();
  //const lastDump = await thepiratebay.dumps().then((dumps) => dumps.sort((a, b) => b.updatedAt - a.updatedAt)[0]);

  if (!lastScraped.lastScraped || lastScraped.lastScraped < lastDump.updatedAt) {
    console.log(`starting to scrape tpb dump: ${JSON.stringify(lastDump)}`);
    await downloadDump(lastDump);

    let entriesProcessed = 0;
    const lr = new LineByLineReader(CSV_FILE_PATH);
    lr.on('line', (line) => {
      if (line.includes("#ADDED")) {
        return;
      }
      if (entriesProcessed % 1000 === 0) {
        console.log(`Processed ${entriesProcessed} entries`);
      }
      // rows are semicolon-delimited: added date; base64 info hash; "name"; size in bytes
      const row = line.match(/(?<=^|;)(".*"|[^;]+)(?=;|$)/g);
      if (row.length !== 4) {
        console.log(`Invalid row: ${line}`);
        return;
      }
      const torrent = {
        uploadDate: moment(row[0], 'YYYY-MMM-DD HH:mm:ss').toDate(),
        infoHash: Buffer.from(row[1], 'base64').toString('hex'),
        title: row[2]
            .replace(/^"|"$/g, '')
            .replace(/&amp;/g, '&')
            .replace(/&\w{2,6};/g, ' ')
            .replace(/\s+/g, ' ')
            .trim(),
        size: parseInt(row[3], 10)
      };

      // if (torrent.uploadDate > checkPoint) {
      //   entriesProcessed++;
      //   return;
      // }

      if (lastScraped.lastScraped && lastScraped.lastScraped > torrent.uploadDate) {
        // torrent was already scraped previously, skipping
        return;
      }

      if (!limiter.empty()) {
        lr.pause()
      }

      limiter.schedule(() => processTorrentRecord(torrent)
          .catch((error) => console.log(`failed ${torrent.title} due to: ${error}`)))
          .then(() => limiter.empty())
          .then((empty) => empty && lr.resume())
          .then(() => entriesProcessed++);
    });
    lr.on('error', (err) => {
      console.log(err);
    });
    lr.on('end', () => {
      fs.unlink(CSV_FILE_PATH, (error) => error && console.warn(error));
      //repository.updateProvider({ name: NAME, lastScraped: lastDump.updatedAt });
      console.log(`finished scraping tpb dump: ${JSON.stringify(lastDump)}!`);
    });
  }
}

const allowedCategories = [
  thepiratebay.Categories.VIDEO.MOVIES,
  thepiratebay.Categories.VIDEO.MOVIES_HD,
  thepiratebay.Categories.VIDEO.MOVIES_DVDR,
  thepiratebay.Categories.VIDEO.MOVIES_3D,
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
const seriesCategories = [
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];

async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return;
  }

  const torrentFound = await findTorrent(record);

  if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: record.infoHash,
    provider: NAME,
    torrentId: record.torrentId,
    title: torrentFound.name,
    size: torrentFound.size,
    type: seriesCategories.includes(torrentFound.subcategory) ? Type.SERIES : Type.MOVIE,
    imdbId: torrentFound.imdbId,
    uploadDate: torrentFound.uploadDate || record.uploadDate,
    seeders: torrentFound.seeders,
  };

  return createTorrentEntry(torrent);
}

async function findTorrent(record) {
  return findTorrentInSource(record)
      .catch(() => findTorrentViaBing(record));
}

async function findTorrentInSource(record) {
  let page = 0;
  let torrentFound;
  while (!torrentFound && page < 5) {
    const torrents = await thepiratebay.search(record.title.replace(/[\W\s]+/, ' '), { page: page });
    torrentFound = torrents.filter(torrent => torrent.magnetLink.toLowerCase().includes(record.infoHash))[0];
    // an empty result page means there is nothing further to search, so bail out
    page = torrents.length === 0 ? 1000 : page + 1;
  }
  if (!torrentFound) {
    return Promise.reject(new Error(`Failed to find torrent ${record.title}`));
  }
  return Promise.resolve(torrentFound)
      .then((torrent) => thepiratebay.torrent(torrent.torrentId));
}

async function findTorrentViaBing(record) {
  return bing.web(`${record.infoHash}`)
      .then((results) => results
          .find(result => result.description.includes('Direct download via magnet link') ||
              result.description.includes('Get this torrent')))
      .then((result) => {
        if (!result) {
          throw new Error(`Failed to find torrent ${record.title}`);
        }
        return result.link.match(/torrent\/(\w+)\//)[1];
      })
      .then((torrentId) => thepiratebay.torrent(torrentId));
}

function downloadDump(dump) {
  try {
    if (fs.existsSync(CSV_FILE_PATH)) {
      console.log('dump file already exists...');
      return;
    }
  } catch (err) {
    console.error(err)
  }

  console.log('downloading dump file...');
  return needle('get', dump.url, { open_timeout: 2000, output: '/tmp/tpb_dump.gz' })
      .then((response) => response.body)
      .then((body) => {
        console.log('unzipping dump file...');
        return ungzip(body);
      })
      .then((unzipped) => {
        console.log('writing dump file...');
        return fs.promises.writeFile(CSV_FILE_PATH, unzipped);
      });
}

module.exports = { scrape };

85
scraper/scrapers/thepiratebay/thepiratebay_scraper.js
Normal file
@@ -0,0 +1,85 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const thepiratebay = require('./thepiratebay_api.js');
const { Type } = require('../../lib/types');
const repository = require('../../lib/repository');
const {
  createTorrentEntry,
  createSkipTorrentEntry,
  getStoredTorrentEntry,
  updateTorrentSeeders
} = require('../../lib/torrentEntries');

const NAME = 'ThePirateBay';
const UNTIL_PAGE = 20;

const limiter = new Bottleneck({ maxConcurrent: 40 });

const allowedCategories = [
  thepiratebay.Categories.VIDEO.MOVIES,
  thepiratebay.Categories.VIDEO.MOVIES_HD,
  thepiratebay.Categories.VIDEO.MOVIES_DVDR,
  thepiratebay.Categories.VIDEO.MOVIES_3D,
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
const seriesCategories = [
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];

async function scrape() {
  const scrapeStart = moment();
  const lastScrape = await repository.getProvider({ name: NAME });
  console.log(`[${scrapeStart}] starting ${NAME} scrape...`);

  const latestTorrents = await getLatestTorrents();
  return Promise.all(latestTorrents.map(torrent => limiter.schedule(() => processTorrentRecord(torrent))))
      .then(() => {
        lastScrape.lastScraped = scrapeStart;
        lastScrape.lastScrapedId = latestTorrents.length && latestTorrents[latestTorrents.length - 1].torrentId;
        return repository.updateProvider(lastScrape);
      })
      .then(() => console.log(`[${moment()}] finished ${NAME} scrape`));
}

async function getLatestTorrents() {
  return Promise.all(allowedCategories.map(category => getLatestTorrentsForCategory(category)))
      .then(entries => entries.reduce((a, b) => a.concat(b), []));
}

async function getLatestTorrentsForCategory(category, page = 0) {
  return thepiratebay.browse({ category, page })
      .then(torrents => torrents.length && page < UNTIL_PAGE
          ? getLatestTorrentsForCategory(category, page + 1).then(nextTorrents => torrents.concat(nextTorrents))
          : torrents)
      .catch(() => []);
}

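// e.g. getLatestTorrentsForCategory(thepiratebay.Categories.VIDEO.MOVIES)
// walks successive /browse pages up to UNTIL_PAGE, concatenating results and
// swallowing errors into an empty list so one category cannot fail the whole scrape.
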
async function processTorrentRecord(record) {
  if (await getStoredTorrentEntry(record)) {
    return updateTorrentSeeders(record);
  }

  const torrentFound = await thepiratebay.torrent(record.torrentId).catch(() => undefined);

  if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: torrentFound.infoHash,
    provider: NAME,
    torrentId: torrentFound.torrentId,
    title: torrentFound.name.replace(/\t|\s+/g, ' '),
    size: torrentFound.size,
    type: seriesCategories.includes(torrentFound.subcategory) ? Type.SERIES : Type.MOVIE,
    imdbId: torrentFound.imdbId,
    uploadDate: torrentFound.uploadDate,
    seeders: torrentFound.seeders,
  };

  return createTorrentEntry(torrent);
}

module.exports = { scrape };

112
scraper/scrapers/thepiratebay/thepiratebay_unofficial_dump_scraper.js
Normal file
@@ -0,0 +1,112 @@
const moment = require('moment');
const Bottleneck = require('bottleneck');
const LineByLineReader = require('line-by-line');
const fs = require('fs');
const decode = require('magnet-uri');
const thepiratebay = require('./thepiratebay_api.js');
const { Type } = require('../../lib/types');
const { createTorrentEntry, createSkipTorrentEntry, getStoredTorrentEntry } = require('../../lib/torrentEntries');

const NAME = 'ThePirateBay';
const CSV_FILE_PATH = '/tmp/tpb.csv';

const limiter = new Bottleneck({ maxConcurrent: 40 });

async function scrape() {
  console.log(`starting to scrape tpb dump...`);
  //const checkPoint = moment('2013-06-16 00:00:00', 'YYYY-MMM-DD HH:mm:ss').toDate();
  // resume from a previously processed line number
  const checkPoint = 4115000;

  let entriesProcessed = 0;
  const lr = new LineByLineReader(CSV_FILE_PATH);
  lr.on('line', (line) => {
    if (entriesProcessed % 1000 === 0) {
      console.log(`Processed ${entriesProcessed} entries`);
    }
    if (entriesProcessed <= checkPoint) {
      entriesProcessed++;
      return;
    }

    // rows are comma-delimited; quoted fields may contain commas
    const row = line.match(/(?<=^|,)(".*"|[^,]*)(?=,|$)/g);
    if (row.length !== 10) {
      console.log(`Invalid row: ${line}`);
      return;
    }
    const torrent = {
      torrentId: row[0],
      title: row[1]
          .replace(/^"|"$/g, '')
          .replace(/&amp;/g, '&')
          .replace(/&\w{2,6};/g, ' ')
          .replace(/\s+/g, ' ')
          .trim(),
      size: parseInt(row[2], 10),
      category: row[4],
      subcategory: row[5],
      infoHash: row[7].toLowerCase() || decode(row[9]).infoHash,
      magnetLink: row[9],
      uploadDate: moment(row[8]).toDate(),
    };

    if (!limiter.empty()) {
      lr.pause()
    }

    limiter.schedule(() => processTorrentRecord(torrent)
        .catch((error) => console.log(`failed ${torrent.title} due to: ${error}`)))
        .then(() => limiter.empty())
        .then((empty) => empty && lr.resume())
        .then(() => entriesProcessed++);
  });
  lr.on('error', (err) => {
    console.log(err);
  });
  lr.on('end', () => {
    console.log(`finished scraping tpb dump!`);
  });
}

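// Row layout as consumed above (hypothetical example, 10 comma-delimited fields):
// index 0 torrentId, 1 "title", 2 size, 4 category, 5 subcategory,
// 7 infoHash, 8 uploadDate, 9 magnetLink
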
const allowedCategories = [
  thepiratebay.Categories.VIDEO.MOVIES,
  thepiratebay.Categories.VIDEO.MOVIES_HD,
  thepiratebay.Categories.VIDEO.MOVIES_DVDR,
  thepiratebay.Categories.VIDEO.MOVIES_3D,
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];
const seriesCategories = [
  thepiratebay.Categories.VIDEO.TV_SHOWS,
  thepiratebay.Categories.VIDEO.TV_SHOWS_HD
];

async function processTorrentRecord(record) {
  if (record.category !== 'Video') {
    return createSkipTorrentEntry(record);
  }
  if (await getStoredTorrentEntry(record)) {
    return;
  }

  const torrentFound = await thepiratebay.torrent(record.torrentId).catch(() => undefined);

  if (!torrentFound || !allowedCategories.includes(torrentFound.subcategory)) {
    return createSkipTorrentEntry(record);
  }

  const torrent = {
    infoHash: record.infoHash,
    provider: NAME,
    torrentId: record.torrentId,
    title: torrentFound.name,
    size: torrentFound.size,
    type: seriesCategories.includes(torrentFound.subcategory) ? Type.SERIES : Type.MOVIE,
    imdbId: torrentFound.imdbId,
    uploadDate: torrentFound.uploadDate,
    seeders: torrentFound.seeders,
  };

  return createTorrentEntry(torrent);
}

module.exports = { scrape };