Big rewrite - distributed consumers for ingestion / scraping (scalable) - single producer written in C#.

Changed from page scraping to rss xml scraping
Includes RealDebridManager hashlist decoding (requires a github readonly PAT as requests must be authenticated) - This allows ingestion of 200k+ entries in a few hours.
Simplifies a lot of torrentio to deal with new data
This commit is contained in:
iPromKnight
2024-02-01 16:38:45 +00:00
parent 6fb4ddcf23
commit ab17ef81be
255 changed files with 18489 additions and 69074 deletions

View File

@@ -0,0 +1,3 @@
**/node_modules
**/npm-debug.log
**/.env

17
src/node/addon/Dockerfile Normal file
View File

@@ -0,0 +1,17 @@
FROM node:16-alpine
# node:16-alpine already ships node and npm; only install the extra tooling.
RUN apk update && apk upgrade && \
    apk add --no-cache git curl && \
    rm -fr /var/cache/apk/*
WORKDIR /app
# Copy manifests first so dependency install is layer-cached independently of source changes.
COPY package*.json ./
# `--only-production` is not a valid npm flag (it would be ignored and dev deps installed);
# `--omit=dev` is the supported way to skip devDependencies with `npm ci`.
RUN npm ci --omit=dev
COPY . .
EXPOSE 7000
CMD ["/usr/bin/node", "--insecure-http-parser", "/app/index.js"]

126
src/node/addon/addon.js Normal file
View File

@@ -0,0 +1,126 @@
import Bottleneck from 'bottleneck';
import { addonBuilder } from 'stremio-addon-sdk';
import { Type } from './lib/types.js';
import { dummyManifest } from './lib/manifest.js';
import { cacheWrapStream } from './lib/cache.js';
import { toStreamInfo, applyStaticInfo } from './lib/streamInfo.js';
import * as repository from './lib/repository.js';
import applySorting from './lib/sort.js';
import { applyMochs, getMochCatalog, getMochItemMeta } from './moch/moch.js';
import StaticLinks from './moch/static.js';
// Cache lifetimes, in seconds, attached to stream/catalog responses.
const CACHE_MAX_AGE = parseInt(process.env.CACHE_MAX_AGE, 10) || 60 * 60; // 1 hour in seconds
const CACHE_MAX_AGE_EMPTY = 60; // 60 seconds
const CATALOG_CACHE_MAX_AGE = 20 * 60; // 20 minutes
const STALE_REVALIDATE_AGE = 4 * 60 * 60; // 4 hours
const STALE_ERROR_AGE = 7 * 24 * 60 * 60; // 7 days
const builder = new addonBuilder(dummyManifest());
// Bounds concurrent stream lookups; once the queue exceeds highWater, the
// OVERFLOW strategy drops newly scheduled jobs. Environment variables arrive
// as strings, so parse them to numbers before handing them to Bottleneck.
const limiter = new Bottleneck({
  maxConcurrent: parseInt(process.env.LIMIT_MAX_CONCURRENT, 10) || 200,
  highWater: parseInt(process.env.LIMIT_QUEUE_SIZE, 10) || 220,
  strategy: Bottleneck.strategy.OVERFLOW
});
// Stream handler: resolves torrent streams for IMDb (tt...) and Kitsu ids.
// The repository lookup is cached (cacheWrapStream) and rate-limited (limiter);
// post-processing (sorting, static info, debrid mochs) happens on every request,
// outside the cache.
builder.defineStreamHandler((args) => {
  // Only IMDb and Kitsu ids are supported; everything else resolves to no streams.
  if (!args.id.match(/tt\d+/i) && !args.id.match(/kitsu:\d+/i)) {
    return Promise.resolve({ streams: [] });
  }
  return cacheWrapStream(args.id, () => limiter.schedule(() =>
    streamHandler(args)
      // Most seeders first; newest upload date breaks ties.
      .then(records => records
        .sort((a, b) => b.torrent.seeders - a.torrent.seeders || b.torrent.uploadDate - a.torrent.uploadDate)
        .map(record => toStreamInfo(record)))))
    .then(streams => applySorting(streams, args.extra))
    .then(streams => applyStaticInfo(streams))
    .then(streams => applyMochs(streams, args.extra))
    .then(streams => enrichCacheParams(streams))
    .catch(error => {
      // NOTE(review): rejects with a string rather than an Error — downstream
      // serverless error handling may depend on this; confirm before changing.
      console.log(`Failed request ${args.id}: ${error}`);
      return Promise.reject(`Failed request ${args.id}: ${error}`);
    });
});
// Catalog handler: serves debrid provider catalogs (id is "selfhostio-<mochKey>").
builder.defineCatalogHandler(async (args) => {
  const mochKey = args.id.replace("selfhostio-", '');
  console.log(`Incoming catalog ${args.id} request with skip=${args.extra.skip || 0}`);
  try {
    const metas = await getMochCatalog(mochKey, args.extra);
    return { metas, cacheMaxAge: CATALOG_CACHE_MAX_AGE };
  } catch (error) {
    return Promise.reject(`Failed retrieving catalog ${args.id}: ${JSON.stringify(error)}`);
  }
});
// Meta handler: resolves a single debrid item's metadata (id is "<mochKey>:<metaId>").
builder.defineMetaHandler(async (args) => {
  const [mochKey, metaId] = args.id.split(':');
  console.log(`Incoming debrid meta ${args.id} request`);
  try {
    const meta = await getMochItemMeta(mochKey, metaId, args.extra);
    // The live "Downloads" view must never be cached.
    const cacheMaxAge = metaId === 'Downloads' ? 0 : CACHE_MAX_AGE;
    return { meta, cacheMaxAge };
  } catch (error) {
    return Promise.reject(`Failed retrieving catalog meta ${args.id}: ${JSON.stringify(error)}`);
  }
});
// Dispatches a stream request to the movie or series repository lookup.
async function streamHandler(args) {
  switch (args.type) {
    case Type.MOVIE:
      return movieRecordsHandler(args);
    case Type.SERIES:
      return seriesRecordsHandler(args);
    default:
      return Promise.reject('not supported type');
  }
}
// Resolves series records for "tt<imdb>:<season>:<episode>" or "kitsu:<id>[:<episode>]" ids.
// Kitsu ids without an episode fall back to the movie-entry lookup.
async function seriesRecordsHandler(args) {
  if (args.id.match(/^tt\d+:\d+:\d+$/)) {
    const [imdbId, seasonPart, episodePart] = args.id.split(':');
    const season = seasonPart !== undefined ? parseInt(seasonPart, 10) : 1;
    const episode = episodePart !== undefined ? parseInt(episodePart, 10) : 1;
    return repository.getImdbIdSeriesEntries(imdbId, season, episode);
  }
  if (args.id.match(/^kitsu:\d+(?::\d+)?$/i)) {
    const [, kitsuId, episodePart] = args.id.split(':');
    const episode = episodePart !== undefined ? parseInt(episodePart, 10) : undefined;
    return episode !== undefined
      ? repository.getKitsuIdSeriesEntries(kitsuId, episode)
      : repository.getKitsuIdMovieEntries(kitsuId);
  }
  return [];
}
// Resolves movie records for a bare IMDb id; Kitsu "movie" ids are delegated
// to the series handler, which handles the kitsu id format.
async function movieRecordsHandler(args) {
  if (args.id.match(/^tt\d+$/)) {
    // The regex guarantees the id contains no ':' — the previous split(':')
    // and debug console.log were leftovers and have been removed.
    return repository.getImdbIdMovieEntries(args.id);
  } else if (args.id.match(/^kitsu:\d+(?::\d+)?$/i)) {
    return seriesRecordsHandler(args);
  }
  return Promise.resolve([]);
}
// Wraps the stream list in a response object with cache-control hints.
// Empty results cache briefly; results where every stream is a failed-access
// placeholder are not cached at all.
function enrichCacheParams(streams) {
  const allFailedAccess = streams.length > 0
    && streams.every(stream => stream?.url?.endsWith(StaticLinks.FAILED_ACCESS));
  let cacheMaxAge;
  if (streams.length === 0) {
    cacheMaxAge = CACHE_MAX_AGE_EMPTY;
  } else if (allFailedAccess) {
    cacheMaxAge = 0;
  } else {
    cacheMaxAge = CACHE_MAX_AGE;
  }
  return {
    streams,
    cacheMaxAge,
    staleRevalidate: STALE_REVALIDATE_AGE,
    staleError: STALE_ERROR_AGE
  };
}
export default builder.getInterface();

14
src/node/addon/index.js Normal file
View File

@@ -0,0 +1,14 @@
import express from 'express';
import serverless from './serverless.js';
import { initBestTrackers } from './lib/magnetHelper.js';
import {ipFilter} from "./lib/ipFilter.js";
// Express bootstrap: IP allowlist first, then static assets, then the addon router.
const app = express();
app.enable('trust proxy');
app.use(ipFilter);
app.use(express.static('static', { maxAge: '1y' }));
app.use((req, res, next) => serverless(req, res, next));
app.listen(process.env.PORT || 7000, () => {
  // The original left this promise floating — a tracker-fetch failure became an
  // unhandled rejection. Log it instead; the addon still works without best trackers.
  initBestTrackers()
    .then(() => console.log(`Started addon at: http://localhost:${process.env.PORT || 7000}`))
    .catch(error => console.error(`Failed to initialise best trackers: ${error}`));
});

103
src/node/addon/lib/cache.js Normal file
View File

@@ -0,0 +1,103 @@
import cacheManager from 'cache-manager';
import mangodbStore from 'cache-manager-mongodb';
import { isStaticUrl } from '../moch/static.js';
// Cache key namespaces — one prefix per cached concern.
const GLOBAL_KEY_PREFIX = 'selfhostio-addon';
const STREAM_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|stream`;
const AVAILABILITY_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|availability`;
const RESOLVED_URL_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|resolved`;
// TTLs are in seconds. NOTE(review): env vars are strings when set — the cache
// library appears to tolerate this, but confirm STREAM_TTL/STREAM_EMPTY_TTL
// overrides are handled as numbers.
const STREAM_TTL = process.env.STREAM_TTL || 60 * 60; // 1 hour
// When the streams are empty we cache for less time in case of timeouts or failures.
const STREAM_EMPTY_TTL = process.env.STREAM_EMPTY_TTL || 30; // 30 seconds
const AVAILABILITY_TTL = 8 * 60 * 60; // 8 hours
const AVAILABILITY_EMPTY_TTL = 30 * 60; // 30 minutes
const MESSAGE_VIDEO_URL_TTL = 60; // 1 minute
const MONGO_URI = process.env.MONGODB_URI;
const NO_CACHE = process.env.NO_CACHE || false;
const memoryCache = initiateMemoryCache();
const remoteCache = initiateRemoteCache();
// Builds the shared stream cache: MongoDB-backed when MONGODB_URI is set,
// otherwise in-process memory; null when caching is disabled via NO_CACHE.
function initiateRemoteCache() {
  if (NO_CACHE) {
    return null;
  }
  if (MONGO_URI) {
    return cacheManager.caching({
      store: mangodbStore,
      uri: MONGO_URI,
      options: {
        collection: 'selfhostio_addon_collection',
        socketTimeoutMS: 120000,
        useNewUrlParser: true,
        useUnifiedTopology: false,
        ttl: STREAM_EMPTY_TTL
      },
      ttl: STREAM_EMPTY_TTL,
      ignoreCacheErrors: true
    });
  }
  // No Mongo configured — fall back to a local memory cache.
  return cacheManager.caching({ store: 'memory', ttl: STREAM_EMPTY_TTL });
}
// Builds the in-process cache used for resolved URLs and availability results.
function initiateMemoryCache() {
  const memoryConfig = {
    store: 'memory',
    ttl: MESSAGE_VIDEO_URL_TTL,
    max: Infinity // effectively unbounded LRU
  };
  return cacheManager.caching(memoryConfig);
}
// Runs `method` through the given cache unless caching is disabled or the
// cache was never initialised, in which case the method runs uncached.
function cacheWrap(cache, key, method, options) {
  const bypassCache = NO_CACHE || !cache;
  return bypassCache ? method() : cache.wrap(key, method, options);
}
// Caches a stream lookup; empty results get the short TTL so failures retry sooner.
export function cacheWrapStream(id, method) {
  const key = `${STREAM_KEY_PREFIX}:${id}`;
  const options = { ttl: (streams) => (streams.length ? STREAM_TTL : STREAM_EMPTY_TTL) };
  return cacheWrap(remoteCache, key, method, options);
}
// Caches a resolved debrid URL in memory; static placeholder URLs expire quickly.
export function cacheWrapResolvedUrl(id, method) {
  const key = `${RESOLVED_URL_KEY_PREFIX}:${id}`;
  const options = { ttl: (url) => (isStaticUrl(url) ? MESSAGE_VIDEO_URL_TTL : STREAM_TTL) };
  return cacheWrap(memoryCache, key, method, options);
}
// Stores per-infoHash availability results in the memory cache; empty results
// get the shorter TTL. Returns the input so callers can chain.
export function cacheAvailabilityResults(results) {
  for (const [infoHash, value] of Object.entries(results)) {
    const ttl = value?.length ? AVAILABILITY_TTL : AVAILABILITY_EMPTY_TTL;
    memoryCache.set(`${AVAILABILITY_KEY_PREFIX}:${infoHash}`, value, { ttl });
  }
  return results;
}
// Bulk-reads cached availability for the given infoHashes, returning a map of
// infoHash -> cached value containing only the hashes that had a cache hit.
// Cache errors resolve to an empty map rather than rejecting.
export function getCachedAvailabilityResults(infoHashes) {
  const keys = infoHashes.map(infoHash => `${AVAILABILITY_KEY_PREFIX}:${infoHash}`);
  return new Promise(resolve => {
    memoryCache.mget(...keys, (error, cached) => {
      if (error) {
        console.log('Failed retrieve availability cache', error);
        return resolve({});
      }
      const availabilityResults = infoHashes.reduce((acc, infoHash, index) => {
        if (cached[index]) {
          acc[infoHash] = cached[index];
        }
        return acc;
      }, {});
      resolve(availabilityResults);
    });
  });
}

View File

@@ -0,0 +1,32 @@
import { DebridOptions } from '../moch/options.js';
import { QualityFilter, Providers, SizeFilter } from './filter.js';
import { LanguageOptions } from './languages.js';
// Keys whose values are comma-separated lists, and the subset whose list
// entries are normalised to upper case (the rest are lower-cased).
const keysToSplit = [Providers.key, LanguageOptions.key, QualityFilter.key, SizeFilter.key, DebridOptions.key];
const keysToUppercase = [SizeFilter.key];
/**
 * Parses a "key=value|key=value" configuration path segment into an object.
 * Keys are lower-cased; malformed parts (not exactly one '=') are skipped.
 * List-valued keys are split on ',' and case-normalised.
 * Returns undefined when no configuration segment was provided.
 */
export function parseConfiguration(configuration) {
  if (!configuration) {
    return undefined;
  }
  const configValues = {};
  for (const part of configuration.split('|')) {
    const [key, value, ...rest] = part.split('=');
    // Accept only well-formed "key=value" pairs (exactly one '=').
    if (value !== undefined && rest.length === 0) {
      configValues[key.toLowerCase()] = value;
    }
  }
  for (const key of keysToSplit) {
    if (configValues[key]) {
      configValues[key] = configValues[key]
        .split(',')
        .map(value => keysToUppercase.includes(key) ? value.toUpperCase() : value.toLowerCase());
    }
  }
  return configValues;
}
// Serialises a configuration object back into the "key=value|key=value" form;
// array values are joined with ','.
function configValue(config) {
  const parts = [];
  for (const [key, value] of Object.entries(config)) {
    const serialised = Array.isArray(value) ? value.join(',') : value;
    parts.push(`${key}=${serialised}`);
  }
  return parts.join('|');
}

View File

@@ -0,0 +1,72 @@
// Known file extensions per media category, all lower case.
const VIDEO_EXTENSIONS = [
  "3g2",
  "3gp",
  "avi",
  "flv",
  "mkv",
  "mk3d",
  "mov",
  "mp2",
  "mp4",
  "m4v",
  "mpe",
  "mpeg",
  "mpg",
  "mpv",
  "webm",
  "wmv",
  "ogm",
  "ts",
  "m2ts"
];
const SUBTITLE_EXTENSIONS = [
  "aqt",
  "gsub",
  "jss",
  "sub",
  "ttxt",
  "pjs",
  "psb",
  "rt",
  "smi",
  "slt",
  "ssf",
  "srt",
  "ssa",
  "ass",
  "usf",
  "idx",
  "vtt"
];
const DISK_EXTENSIONS = [
  "iso",
  "m2ts",
  "ts",
  "vob"
];
const ARCHIVE_EXTENSIONS = [
  "rar",
  "zip"
];
/** @returns {boolean} true when the filename has a known video extension. */
export function isVideo(filename) {
  return isExtension(filename, VIDEO_EXTENSIONS);
}
/** @returns {boolean} true when the filename has a known subtitle extension. */
export function isSubtitle(filename) {
  return isExtension(filename, SUBTITLE_EXTENSIONS);
}
/** @returns {boolean} true when the filename has a known disk-image extension. */
export function isDisk(filename) {
  return isExtension(filename, DISK_EXTENSIONS);
}
/** @returns {boolean} true when the filename has a known archive extension. */
export function isArchive(filename) {
  return isExtension(filename, ARCHIVE_EXTENSIONS);
}
/**
 * Case-insensitively checks the filename's 2-4 character extension against the list.
 * Fix: previously returned null (not false) when no extension matched the regex,
 * so the predicate now always returns a proper boolean.
 */
export function isExtension(filename, extensions) {
  const extensionMatch = filename?.match(/\.(\w{2,4})$/);
  return extensionMatch ? extensions.includes(extensionMatch[1].toLowerCase()) : false;
}

View File

@@ -0,0 +1,168 @@
import { extractProvider, parseSize, extractSize } from './titleHelper.js';
import { Type } from './types.js';
// Selectable scraping providers; `key` is the configuration token, `label`
// is what the landing page shows. filterByProvider matches keys against
// extractProvider(stream.title).
export const Providers = {
  key: 'providers',
  options: [
    {
      key: 'YTS',
      label: 'YTS'
    },
    {
      key: 'EZTV',
      label: 'EZTV'
    },
    {
      key: 'DMM',
      label: 'DMM'
    },
    {
      key: 'TPB',
      label: 'TPB'
    },
    {
      key: 'TorrentGalaxy',
      label: 'TorrentGalaxy'
    }
  ]
};
// Quality-exclusion filters. Each option's test() returns true when a stream
// belongs to that quality bucket; filterByQuality removes streams matched by
// any option the user selected.
export const QualityFilter = {
  key: 'qualityfilter',
  options: [
    {
      key: 'brremux',
      label: 'BluRay REMUX',
      test(quality, bingeGroup) {
        // REMUX is only identifiable via the bingeGroup hint, not the quality string.
        return bingeGroup?.includes(this.label);
      }
    },
    {
      key: 'hdrall',
      label: 'HDR/HDR10+/Dolby Vision',
      items: ['HDR', 'HDR10+', 'DV'],
      test(quality) {
        // HDR profile tokens follow the resolution token (e.g. "4k HDR10+").
        const hdrProfiles = quality?.split(' ')?.slice(1)?.join() || '';
        return this.items.some(hdrType => hdrProfiles.includes(hdrType));
      }
    },
    {
      key: 'dolbyvision',
      label: 'Dolby Vision',
      test(quality) {
        // Matches only pure DV streams (exact equality, unlike hdrall's substring test).
        const hdrProfiles = quality?.split(' ')?.slice(1)?.join() || '';
        return hdrProfiles === 'DV';
      }
    },
    {
      key: '4k',
      label: '4k',
      items: ['4k'],
      test(quality) {
        return quality && this.items.includes(quality.split(' ')[0]);
      }
    },
    {
      key: '1080p',
      label: '1080p',
      items: ['1080p'],
      test(quality) {
        return this.items.includes(quality)
      }
    },
    {
      key: '720p',
      label: '720p',
      items: ['720p'],
      test(quality) {
        return this.items.includes(quality)
      }
    },
    {
      key: '480p',
      label: '480p',
      items: ['480p'],
      test(quality) {
        return this.items.includes(quality)
      }
    },
    {
      key: 'other',
      label: 'Other (DVDRip/HDRip/BDRip...)',
      // could be ['DVDRip', 'HDRip', 'BDRip', 'BRRip', 'BluRay', 'WEB-DL', 'WEBRip', 'HDTV', 'DivX', 'XviD']
      // Catch-all: matches any quality whose first token is NOT a known bucket.
      items: ['4k', '1080p', '720p', '480p', 'SCR', 'CAM', 'TeleSync', 'TeleCine'],
      test(quality) {
        return quality && !this.items.includes(quality.split(' ')[0]);
      }
    },
    {
      key: 'scr',
      label: 'Screener',
      items: ['SCR'],
      test(quality) {
        return this.items.includes(quality)
      }
    },
    {
      key: 'cam',
      label: 'Cam',
      items: ['CAM', 'TeleSync', 'TeleCine'],
      test(quality) {
        return this.items.includes(quality)
      }
    },
    {
      key: 'unknown',
      label: 'Unknown',
      test(quality) {
        // Streams with no quality token at all.
        return !quality
      }
    }
  ]
};
// Size-limit filter key; values are parsed by filterBySize.
export const SizeFilter = {
  key: 'sizefilter'
}
const defaultProviderKeys = Providers.options.map(provider => provider.key);
/** Runs the provider, quality, and size filters over the streams, in that order. */
export default function applyFilters(streams, config) {
  const filters = [filterByProvider, filterByQuality, filterBySize];
  let filtered = streams;
  for (const filter of filters) {
    filtered = filter(filtered, config);
  }
  return filtered;
}
// Keeps only streams whose provider (parsed from the title) is in the
// configured list; all providers are allowed when none are configured.
function filterByProvider(streams, config) {
  const allowedProviders = config.providers || defaultProviderKeys;
  if (!allowedProviders?.length) {
    return streams;
  }
  return streams.filter(stream => allowedProviders.includes(extractProvider(stream.title)));
}
// Removes streams matching any user-selected quality-exclusion option.
// The quality token is the second line of the stream name.
function filterByQuality(streams, config) {
  const selectedFilters = config[QualityFilter.key];
  if (!selectedFilters) {
    return streams;
  }
  const activeOptions = QualityFilter.options.filter(option => selectedFilters.includes(option.key));
  const isExcluded = (stream) => {
    const streamQuality = stream.name.split('\n')[1];
    const bingeGroup = stream.behaviorHints?.bingeGroup;
    return activeOptions.some(option => option.test(streamQuality, bingeGroup));
  };
  return streams.filter(stream => !isExcluded(stream));
}
// Drops streams larger than the configured size limit. Movies use the first
// configured limit, series the last (the config format allows "movie,series").
function filterBySize(streams, config) {
  const sizeFilters = config[SizeFilter.key];
  if (!sizeFilters?.length) {
    return streams;
  }
  // Read by index instead of shift()/pop(): the originals mutated the shared
  // config array, so each call consumed an entry and later requests saw a
  // shrinking (eventually empty) filter list.
  const rawLimit = config.type === Type.MOVIE
    ? sizeFilters[0]
    : sizeFilters[sizeFilters.length - 1];
  const sizeLimit = parseSize(rawLimit);
  return streams.filter(stream => extractSize(stream.title) <= sizeLimit);
}

View File

@@ -0,0 +1,49 @@
import fs from 'fs';
import path from 'path';
import requestIp from 'request-ip';
import ip from 'ip';
// Static IP allowlist, loaded once at startup from an optional allowed_ips.json
// (a JSON array of strings) in the working directory. Entries containing '/'
// are treated as CIDR subnets, everything else as exact addresses.
// NOTE(review): a malformed JSON file will throw at startup (fail fast) — confirm
// that is the intended behavior.
const filePath = path.join(process.cwd(), 'allowed_ips.json');
let ALLOWED_ADDRESSES = [];
let ALLOWED_SUBNETS = [];
if (fs.existsSync(filePath)) {
  const allowedAddresses = JSON.parse(fs.readFileSync(filePath, 'utf8'));
  allowedAddresses.forEach(address => {
    if (address.indexOf('/') === -1) {
      ALLOWED_ADDRESSES.push(address);
    } else {
      ALLOWED_SUBNETS.push(address);
    }
  });
}
// True when the address is explicitly allowlisted or falls inside an allowed CIDR subnet.
const IpIsAllowed = function(ipAddress) {
  if (ALLOWED_ADDRESSES.includes(ipAddress)) {
    return true;
  }
  return ALLOWED_SUBNETS.some(subnet => ip.cidrSubnet(subnet).contains(ipAddress));
};
// Express middleware: rejects requests from non-allowlisted IPs with a 404.
// When no allowlist is configured at all, every request passes through.
export const ipFilter = function (req, res, next) {
  const ipAddress = requestIp.getClientIp(req);
  const allowlistConfigured = ALLOWED_ADDRESSES.length > 0 || ALLOWED_SUBNETS.length > 0;
  if (!allowlistConfigured || IpIsAllowed(ipAddress)) {
    return next();
  }
  console.log(`IP ${ipAddress} is not allowed`);
  // 404 (not 403) so the addon's existence is not revealed to blocked clients.
  res.status(404).send(null);
};

View File

@@ -0,0 +1,486 @@
// CSS injected verbatim into the landing page's <style> tag (template-literal
// content is runtime text and must not be altered).
const STYLESHEET = `
* {
box-sizing: border-box;
}
body,
html {
margin: 0;
padding: 0;
width: 100%;
height: 100%
}
html {
background-size: auto 100%;
background-size: cover;
background-position: center center;
background-repeat: repeat-y;
}
body {
display: flex;
background-color: transparent;
font-family: 'Open Sans', Arial, sans-serif;
color: white;
}
h1 {
font-size: 4.5vh;
font-weight: 700;
}
h2 {
font-size: 2.2vh;
font-weight: normal;
font-style: italic;
opacity: 0.8;
}
h3 {
font-size: 2.2vh;
}
h1,
h2,
h3,
p,
label {
margin: 0;
text-shadow: 0 0 1vh rgba(0, 0, 0, 0.15);
}
p {
font-size: 1.75vh;
}
ul {
font-size: 1.75vh;
margin: 0;
margin-top: 1vh;
padding-left: 3vh;
}
a {
color: green
}
a.install-link {
text-decoration: none
}
.install-button {
border: 0;
outline: 0;
color: white;
background: #8A5AAB;
padding: 1.2vh 3.5vh;
margin: auto;
text-align: center;
font-family: 'Open Sans', Arial, sans-serif;
font-size: 2.2vh;
font-weight: 600;
cursor: pointer;
display: block;
box-shadow: 0 0.5vh 1vh rgba(0, 0, 0, 0.2);
transition: box-shadow 0.1s ease-in-out;
}
.install-button:hover {
box-shadow: none;
}
.install-button:active {
box-shadow: 0 0 0 0.5vh white inset;
}
#addon {
width: 90vh;
margin: auto;
padding-left: 10%;
padding-right: 10%;
background: rgba(0, 0, 0, 0.60);
}
.logo {
height: 14vh;
width: 14vh;
margin: auto;
margin-bottom: 3vh;
}
.logo img {
width: 100%;
}
.name, .version {
display: inline-block;
vertical-align: top;
}
.name {
line-height: 5vh;
}
.version {
position: absolute;
line-height: 5vh;
margin-left: 1vh;
opacity: 0.8;
}
.contact {
left: 0;
bottom: 4vh;
width: 100%;
margin-top: 1vh;
text-align: center;
}
.contact a {
font-size: 1.4vh;
font-style: italic;
}
.separator {
margin-bottom: 4vh;
}
.label {
font-size: 2.2vh;
font-weight: 600;
padding: 0;
line-height: inherit;
}
.btn-group, .multiselect-container {
width: 100%;
}
.btn {
text-align: left;
}
.multiselect-container {
border: 0;
border-radius: 0;
}
.input, .btn {
width: 100%;
margin: auto;
margin-bottom: 10px;
padding: 6px 12px;
border: 0;
border-radius: 0;
outline: 0;
color: #333;
background-color: rgb(255, 255, 255);
box-shadow: 0 0.5vh 1vh rgba(0, 0, 0, 0.2);
}
.input:focus, .btn:focus {
outline: none;
box-shadow: 0 0 0 2pt rgb(30, 144, 255, 0.7);
}
`;
import { Providers, QualityFilter, SizeFilter } from './filter.js';
import { SortOptions } from './sort.js';
import { LanguageOptions } from './languages.js';
import { DebridOptions } from '../moch/options.js';
import { MochOptions } from '../moch/moch.js';
/**
 * Renders the addon's configuration landing page as a single HTML string.
 * Current values from `config` are pre-selected in the form; the inline script
 * rebuilds the stremio:// install link on every change.
 * NOTE: everything inside the returned template literal is runtime output —
 * comments may only live in this JS preamble.
 * @param manifest addon manifest (name, version, logo, background, types)
 * @param config   parsed configuration values keyed by option key
 * @returns {string} complete HTML document
 */
export default function landingTemplate(manifest, config = {}) {
  // Current selections with sensible defaults for an unconfigured install.
  const providers = config[Providers.key] || Providers.options.map(provider => provider.key);
  const sort = config[SortOptions.key] || SortOptions.options.qualitySeeders.key;
  const languages = config[LanguageOptions.key] || [];
  const qualityFilters = config[QualityFilter.key] || [];
  const sizeFilter = (config[SizeFilter.key] || []).join(',');
  const limit = config.limit || '';
  // Only one debrid provider may be active: the first moch key present in config.
  const debridProvider = Object.keys(MochOptions).find(mochKey => config[mochKey]);
  const debridOptions = config[DebridOptions.key] || [];
  const realDebridApiKey = config[MochOptions.realdebrid.key] || '';
  const premiumizeApiKey = config[MochOptions.premiumize.key] || '';
  const allDebridApiKey = config[MochOptions.alldebrid.key] || '';
  const debridLinkApiKey = config[MochOptions.debridlink.key] || '';
  const offcloudApiKey = config[MochOptions.offcloud.key] || '';
  // Put.io credentials are stored as a single "clientId@token" value.
  const putioKey = config[MochOptions.putio.key] || '';
  const putioClientId = putioKey.replace(/@.*/, '');
  const putioToken = putioKey.replace(/.*@/, '');
  const background = manifest.background || 'https://dl.strem.io/addon-background.jpg';
  const logo = manifest.logo || 'https://dl.strem.io/addon-logo.png';
  // Pre-rendered <option> lists for each select element.
  const providersHTML = Providers.options
    .map(provider => `<option value="${provider.key}">${provider.foreign ? provider.foreign + ' ' : ''}${provider.label}</option>`)
    .join('\n');
  const sortOptionsHTML = Object.values(SortOptions.options)
    .map((option, i) => `<option value="${option.key}" ${i === 0 ? 'selected' : ''}>${option.description}</option>`)
    .join('\n');
  const languagesOptionsHTML = LanguageOptions.options
    .map((option, i) => `<option value="${option.key}">${option.label}</option>`)
    .join('\n');
  const qualityFiltersHTML = Object.values(QualityFilter.options)
    .map(option => `<option value="${option.key}">${option.label}</option>`)
    .join('\n');
  const debridProvidersHTML = Object.values(MochOptions)
    .map(moch => `<option value="${moch.key}">${moch.name}</option>`)
    .join('\n');
  const debridOptionsHTML = Object.values(DebridOptions.options)
    .map(option => `<option value="${option.key}">${option.description}</option>`)
    .join('\n');
  // e.g. "movie" -> "Movies", "series" stays unpluralised.
  const stylizedTypes = manifest.types
    .map(t => t[0].toUpperCase() + t.slice(1) + (t !== 'series' ? 's' : ''));
  return `
<!DOCTYPE html>
<html style="background-image: url(${background});">
<head>
<meta charset="utf-8">
<title>${manifest.name} - Stremio Addon</title>
<link rel="shortcut icon" href="${logo}" type="image/x-icon">
<link href="https://fonts.googleapis.com/css?family=Open+Sans:400,600,700&display=swap" rel="stylesheet">
<script src="https://code.jquery.com/jquery-3.6.4.slim.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/popper.js@1.16.1/dist/umd/popper.min.js"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/js/bootstrap.min.js"></script>
<link href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/css/bootstrap.min.css" rel="stylesheet" >
<script src="https://cdnjs.cloudflare.com/ajax/libs/bootstrap-multiselect/1.1.2/js/bootstrap-multiselect.min.js"></script>
<link href="https://cdnjs.cloudflare.com/ajax/libs/bootstrap-multiselect/1.1.2/css/bootstrap-multiselect.css" rel="stylesheet"/>
<style>${STYLESHEET}</style>
</head>
<body>
<div id="addon">
<div class="logo">
<img src="${logo}">
</div>
<h1 class="name">${manifest.name}</h1>
<h2 class="version">${manifest.version || '0.0.0'}</h2>
<h2 class="description">${manifest.description || ''}</h2>
<div class="separator"></div>
<h3 class="gives">This addon has more :</h3>
<ul>
${stylizedTypes.map(t => `<li>${t}</li>`).join('')}
</ul>
<div class="separator"></div>
<label class="label" for="iProviders">Providers:</label>
<select id="iProviders" class="input" onchange="generateInstallLink()" name="providers[]" multiple="multiple">
${providersHTML}
</select>
<label class="label" for="iSort">Sorting:</label>
<select id="iSort" class="input" onchange="sortModeChange()">
${sortOptionsHTML}
</select>
<label class="label" for="iLanguages">Priority foreign language:</label>
<select id="iLanguages" class="input" onchange="generateInstallLink()" name="languages[]" multiple="multiple" title="Streams with the selected dubs/subs language will be shown on the top">
${languagesOptionsHTML}
</select>
<label class="label" for="iQualityFilter">Exclude qualities/resolutions:</label>
<select id="iQualityFilter" class="input" onchange="generateInstallLink()" name="qualityFilters[]" multiple="multiple">
${qualityFiltersHTML}
</select>
<label class="label" id="iLimitLabel" for="iLimit">Max results per quality:</label>
<input type="text" inputmode="numeric" pattern="[0-9]*" id="iLimit" onchange="generateInstallLink()" class="input" placeholder="All results">
<label class="label" id="iSizeFilterLabel" for="iSizeFilter">Video size limit:</label>
<input type="text" pattern="([0-9.]*(?:MB|GB),?)+" id="iSizeFilter" onchange="generateInstallLink()" class="input" placeholder="No limit" title="Returned videos cannot exceed this size, use comma to have different size for movies and series. Examples: 5GB ; 800MB ; 10GB,2GB">
<label class="label" for="iDebridProviders">Debrid provider:</label>
<select id="iDebridProviders" class="input" onchange="debridProvidersChange()">
<option value="none" selected>None</option>
${debridProvidersHTML}
</select>
<div id="dRealDebrid">
<label class="label" for="iRealDebrid">RealDebrid API Key (Find it <a href='https://real-debrid.com/apitoken' target="_blank">here</a>):</label>
<input type="text" id="iRealDebrid" onchange="generateInstallLink()" class="input">
</div>
<div id="dAllDebrid">
<label class="label" for="iAllDebrid">AllDebrid API Key (Create it <a href='https://alldebrid.com/apikeys' target="_blank">here</a>):</label>
<input type="text" id="iAllDebrid" onchange="generateInstallLink()" class="input">
</div>
<div id="dPremiumize">
<label class="label" for="iPremiumize">Premiumize API Key (Find it <a href='https://www.premiumize.me/account' target="_blank">here</a>):</label>
<input type="text" id="iPremiumize" onchange="generateInstallLink()" class="input">
</div>
<div id="dDebridLink">
<label class="label" for="iDebridLink">DebridLink API Key (Find it <a href='https://debrid-link.fr/webapp/apikey' target="_blank">here</a>):</label>
<input type="text" id="iDebridLink" onchange="generateInstallLink()" class="input">
</div>
<div id="dOffcloud">
<label class="label" for="iOffcloud">Offcloud API Key (Find it <a href='https://offcloud.com/#/account' target="_blank">here</a>):</label>
<input type="text" id="iOffcloud" onchange="generateInstallLink()" class="input">
</div>
<div id="dPutio">
<label class="label" for="iPutio">Put.io ClientId and Token (Create new OAuth App <a href='https://app.put.io/oauth' target="_blank">here</a>):</label>
<input type="text" id="iPutioClientId" placeholder="ClientId" onchange="generateInstallLink()" class="input">
<input type="text" id="iPutioToken" placeholder="Token" onchange="generateInstallLink()" class="input">
</div>
<div id="dDebridOptions">
<label class="label" for="iDebridOptions">Debrid options:</label>
<select id="iDebridOptions" class="input" onchange="generateInstallLink()" name="debridOptions[]" multiple="multiple">
${debridOptionsHTML}
</select>
</div>
<div class="separator"></div>
<a id="installLink" class="install-link" href="#">
<button name="Install" class="install-button">INSTALL</button>
</a>
<div class="contact">
<p>Or paste into Stremio search bar after clicking install</p>
</div>
<div class="separator"></div>
</div>
<script type="text/javascript">
$(document).ready(function() {
const isTvMedia = window.matchMedia("tv").matches;
const isTvAgent = /\\b(?:tv|wv)\\b/i.test(navigator.userAgent)
const isDesktopMedia = window.matchMedia("(pointer:fine)").matches;
if (isDesktopMedia && !isTvMedia && !isTvAgent) {
$('#iProviders').multiselect({
nonSelectedText: 'All providers',
buttonTextAlignment: 'left',
onChange: () => generateInstallLink()
});
$('#iProviders').multiselect('select', [${providers.map(provider => '"' + provider + '"')}]);
$('#iLanguages').multiselect({
nonSelectedText: 'None',
buttonTextAlignment: 'left',
onChange: () => generateInstallLink()
});
$('#iLanguages').multiselect('select', [${languages.map(language => '"' + language + '"')}]);
$('#iQualityFilter').multiselect({
nonSelectedText: 'None',
buttonTextAlignment: 'left',
onChange: () => generateInstallLink()
});
$('#iQualityFilter').multiselect('select', [${qualityFilters.map(filter => '"' + filter + '"')}]);
$('#iDebridOptions').multiselect({
nonSelectedText: 'None',
buttonTextAlignment: 'left',
onChange: () => generateInstallLink()
});
$('#iDebridOptions').multiselect('select', [${debridOptions.map(option => '"' + option + '"')}]);
} else {
$('#iProviders').val([${providers.map(provider => '"' + provider + '"')}]);
$('#iLanguages').val([${languages.map(language => '"' + language + '"')}]);
$('#iQualityFilter').val([${qualityFilters.map(filter => '"' + filter + '"')}]);
$('#iDebridOptions').val([${debridOptions.map(option => '"' + option + '"')}]);
}
$('#iDebridProviders').val("${debridProvider || 'none'}");
$('#iRealDebrid').val("${realDebridApiKey}");
$('#iPremiumize').val("${premiumizeApiKey}");
$('#iAllDebrid').val("${allDebridApiKey}");
$('#iDebridLink').val("${debridLinkApiKey}");
$('#iOffcloud').val("${offcloudApiKey}");
$('#iPutioClientId').val("${putioClientId}");
$('#iPutioToken').val("${putioToken}");
$('#iSort').val("${sort}");
$('#iLimit').val("${limit}");
$('#iSizeFilter').val("${sizeFilter}");
generateInstallLink();
debridProvidersChange();
});
function sortModeChange() {
if (['${SortOptions.options.seeders.key}', '${SortOptions.options.size.key}'].includes($('#iSort').val())) {
$("#iLimitLabel").text("Max results:");
} else {
$("#iLimitLabel").text("Max results per quality:");
}
generateInstallLink();
}
function debridProvidersChange() {
const provider = $('#iDebridProviders').val()
$('#dDebridOptions').toggle(provider !== 'none');
$('#dRealDebrid').toggle(provider === '${MochOptions.realdebrid.key}');
$('#dPremiumize').toggle(provider === '${MochOptions.premiumize.key}');
$('#dAllDebrid').toggle(provider === '${MochOptions.alldebrid.key}');
$('#dDebridLink').toggle(provider === '${MochOptions.debridlink.key}');
$('#dOffcloud').toggle(provider === '${MochOptions.offcloud.key}');
$('#dPutio').toggle(provider === '${MochOptions.putio.key}');
}
function generateInstallLink() {
const providersList = $('#iProviders').val() || [];
const providersValue = providersList.join(',');
const qualityFilterValue = $('#iQualityFilter').val().join(',') || '';
const sortValue = $('#iSort').val() || '';
const languagesValue = $('#iLanguages').val().join(',') || [];
const limitValue = $('#iLimit').val() || '';
const sizeFilterValue = $('#iSizeFilter').val() || '';
const debridOptionsValue = $('#iDebridOptions').val().join(',') || '';
const realDebridValue = $('#iRealDebrid').val() || '';
const allDebridValue = $('#iAllDebrid').val() || '';
const debridLinkValue = $('#iDebridLink').val() || ''
const premiumizeValue = $('#iPremiumize').val() || '';
const offcloudValue = $('#iOffcloud').val() || ''
const putioClientIdValue = $('#iPutioClientId').val() || '';
const putioTokenValue = $('#iPutioToken').val() || '';
const providers = providersList.length && providersList.length < ${Providers.options.length} && providersValue;
const qualityFilters = qualityFilterValue.length && qualityFilterValue;
const sort = sortValue !== '${SortOptions.options.qualitySeeders.key}' && sortValue;
const languages = languagesValue.length && languagesValue;
const limit = /^[1-9][0-9]{0,2}$/.test(limitValue) && limitValue;
const sizeFilter = sizeFilterValue.length && sizeFilterValue;
const debridOptions = debridOptionsValue.length && debridOptionsValue.trim();
const realDebrid = realDebridValue.length && realDebridValue.trim();
const premiumize = premiumizeValue.length && premiumizeValue.trim();
const allDebrid = allDebridValue.length && allDebridValue.trim();
const debridLink = debridLinkValue.length && debridLinkValue.trim();
const offcloud = offcloudValue.length && offcloudValue.trim();
const putio = putioClientIdValue.length && putioTokenValue.length && putioClientIdValue.trim() + '@' + putioTokenValue.trim();
let configurationValue = [
['${Providers.key}', providers],
['${SortOptions.key}', sort],
['${LanguageOptions.key}', languages],
['${QualityFilter.key}', qualityFilters],
['limit', limit],
['${SizeFilter.key}', sizeFilter],
['${DebridOptions.key}', debridOptions],
['${MochOptions.realdebrid.key}', realDebrid],
['${MochOptions.premiumize.key}', premiumize],
['${MochOptions.alldebrid.key}', allDebrid],
['${MochOptions.debridlink.key}', debridLink],
['${MochOptions.offcloud.key}', offcloud],
['${MochOptions.putio.key}', putio]
].filter(([_, value]) => value.length).map(([key, value]) => key + '=' + value).join('|');
const configuration = configurationValue && configurationValue.length ? '/' + configurationValue : '';
const location = window.location.host + configuration + '/manifest.json'
installLink.href = 'stremio://' + location;
console.log("Install link: " + installLink.href.replace('stremio://', 'https://'));
}
installLink.addEventListener('click', function() {
navigator.clipboard.writeText(installLink.href.replace('stremio://', 'https://'));
});
</script>
</body>
</html>`
}

View File

@@ -0,0 +1,76 @@
// Maps lowercase language tags to display values: the first four entries are
// descriptive labels, the rest are flag emojis. Insertion order matters —
// mapLanguages sorts by position in this object and LanguageOptions slices it.
// NOTE(review): the Serbian flag value has a trailing space — verify intentional.
const languageMapping = {
  'dubbed': 'Dubbed',
  'multi audio': 'Multi Audio',
  'multi subs': 'Multi Subs',
  'dual audio': 'Dual Audio',
  'english': '🇬🇧',
  'japanese': '🇯🇵',
  'russian': '🇷🇺',
  'italian': '🇮🇹',
  'portuguese': '🇵🇹',
  'spanish': '🇪🇸',
  'latino': '🇲🇽',
  'korean': '🇰🇷',
  'chinese': '🇨🇳',
  'taiwanese': '🇹🇼',
  'french': '🇫🇷',
  'german': '🇩🇪',
  'dutch': '🇳🇱',
  'hindi': '🇮🇳',
  'telugu': '🇮🇳',
  'tamil': '🇮🇳',
  'polish': '🇵🇱',
  'lithuanian': '🇱🇹',
  'latvian': '🇱🇻',
  'estonian': '🇪🇪',
  'czech': '🇨🇿',
  'slovakian': '🇸🇰',
  'slovenian': '🇸🇮',
  'hungarian': '🇭🇺',
  'romanian': '🇷🇴',
  'bulgarian': '🇧🇬',
  'serbian': '🇷🇸 ',
  'croatian': '🇭🇷',
  'ukrainian': '🇺🇦',
  'greek': '🇬🇷',
  'danish': '🇩🇰',
  'finnish': '🇫🇮',
  'swedish': '🇸🇪',
  'norwegian': '🇳🇴',
  'turkish': '🇹🇷',
  'arabic': '🇸🇦',
  'persian': '🇮🇷',
  'hebrew': '🇮🇱',
  'vietnamese': '🇻🇳',
  'indonesian': '🇮🇩',
  'malay': '🇲🇾',
  'thai': '🇹🇭'
}
// Selectable language filter options for the configuration page.
// slice(5) skips the four meta entries (dubbed/multi audio/multi subs/dual
// audio) AND english — english is deliberately not offered as a filter.
export const LanguageOptions = {
  key: 'language',
  options: Object.keys(languageMapping).slice(5).map(lang => ({
    key: lang,
    // Label is "<flag> <Capitalized name>", e.g. "🇯🇵 Japanese".
    label: `${languageMapping[lang]} ${lang.charAt(0).toUpperCase()}${lang.slice(1)}`
  }))
}
/**
 * Maps language names to their display flags, ordered by languageMapping
 * declaration order, followed by any unmapped names sorted alphabetically.
 * Duplicates (e.g. hindi/telugu/tamil sharing one flag) are removed.
 * Fix: Object.values(languageMapping) was rebuilt inside the sort comparator
 * on every comparison (O(n²) scans); it is now computed once.
 * @param {string[]} languages - parsed language names
 * @returns {string[]} unique flags/labels followed by unmapped names
 */
export function mapLanguages(languages) {
  const orderedValues = Object.values(languageMapping);
  const mapped = languages
      .map(language => languageMapping[language])
      .filter(language => language)
      .sort((a, b) => orderedValues.indexOf(a) - orderedValues.indexOf(b));
  const unmapped = languages
      .filter(language => !languageMapping[language])
      .sort((a, b) => a.localeCompare(b))
  return [...new Set([].concat(mapped).concat(unmapped))];
}
/**
 * Returns true when the stream's title contains the flag of any of the
 * requested languages.
 */
export function containsLanguage(stream, languages) {
  const flags = languages.map(language => languageMapping[language]);
  return flags.some(flag => stream.title.includes(flag));
}
/**
 * Reverse lookup: returns the first language name whose mapped flag equals
 * the given code, or undefined when no entry matches.
 */
export function languageFromCode(code) {
  const match = Object.entries(languageMapping).find(([, flag]) => flag === code);
  return match?.[0];
}

View File

@@ -0,0 +1,114 @@
import axios from 'axios';
import magnet from 'magnet-uri';
import { getRandomUserAgent } from './requestHelper.js';
import { getTorrent } from './repository.js';
import { Type } from './types.js';
import { extractProvider } from "./titleHelper.js";
import { Providers } from "./filter.js";
// Live source for the "best" public tracker list, fetched at startup.
const TRACKERS_URL = 'https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_best.txt';
// Static fallback/extra trackers merged into every magnet link.
const DEFAULT_TRACKERS = [
  "udp://47.ip-51-68-199.eu:6969/announce",
  "udp://9.rarbg.me:2940",
  "udp://9.rarbg.to:2820",
  "udp://exodus.desync.com:6969/announce",
  "udp://explodie.org:6969/announce",
  "udp://ipv4.tracker.harry.lu:80/announce",
  "udp://open.stealth.si:80/announce",
  "udp://opentor.org:2710/announce",
  "udp://opentracker.i2p.rocks:6969/announce",
  "udp://retracker.lanta-net.ru:2710/announce",
  "udp://tracker.cyberia.is:6969/announce",
  "udp://tracker.dler.org:6969/announce",
  "udp://tracker.ds.is:6969/announce",
  "udp://tracker.internetwarriors.net:1337",
  "udp://tracker.openbittorrent.com:6969/announce",
  "udp://tracker.opentrackr.org:1337/announce",
  "udp://tracker.tiny-vps.com:6969/announce",
  "udp://tracker.torrent.eu.org:451/announce",
  "udp://valakas.rollo.dnsabr.com:2710/announce",
  "udp://www.torrent.eu.org:451/announce",
]
// Extra trackers added for anime-type torrents.
const ANIME_TRACKERS = [
  "http://nyaa.tracker.wf:7777/announce",
  "http://anidex.moe:6969/announce",
  "http://tracker.anirena.com:80/announce",
  "udp://tracker.uw0.xyz:6969/announce",
  "http://share.camoe.cn:8080/announce",
  "http://t.nyaatracker.com:80/announce",
];
// Extra trackers added for torrents from Russian providers.
const RUSSIAN_TRACKERS = [
  "udp://opentor.net:6969",
  "http://bt.t-ru.org/ann?magnet",
  "http://bt2.t-ru.org/ann?magnet",
  "http://bt3.t-ru.org/ann?magnet",
  "http://bt4.t-ru.org/ann?magnet",
];
// Some trackers have limits on original torrent trackers,
// where downloading ip has to seed the torrents for some amount of time,
// thus it doesn't work on mochs.
// So it's better to exclude them and try to download through DHT,
// as the torrent won't start anyway.
// Provider labels flagged with the Russian flag in the Providers catalog.
const RUSSIAN_PROVIDERS = Providers.options
    .filter(provider => provider.foreign === '🇷🇺')
    .map(provider => provider.label);
// Provider labels flagged as anime sources in the Providers catalog.
const ANIME_PROVIDERS = Providers.options
    .filter(provider => provider.anime)
    .map(provider => provider.label);
// Populated once by initBestTrackers(); empty until then.
let BEST_TRACKERS = [];
let ALL_ANIME_TRACKERS = [];
let ALL_RUSSIAN_TRACKERS = [];
/**
 * Builds a magnet uri for the given info hash, merging the torrent's stored
 * trackers with the anime/russian tracker lists where applicable.
 * Falls back to a bare { infoHash } (no title, no trackers) when the torrent
 * is not found in the repository.
 */
export async function getMagnetLink(infoHash) {
  const torrent = await getTorrent(infoHash).catch(() => ({ infoHash }));
  const torrentTrackers = torrent?.trackers?.split(',') || [];
  const animeTrackers = torrent.type === Type.ANIME ? ALL_ANIME_TRACKERS : [];
  const providerTrackers = RUSSIAN_PROVIDERS.includes(torrent.provider) && ALL_RUSSIAN_TRACKERS || [];
  const trackers = unique([].concat(torrentTrackers).concat(animeTrackers).concat(providerTrackers));
  return magnet.encode({ infoHash: infoHash, name: torrent.title, announce: trackers });
}
/**
 * Fetches the live "best trackers" list and precomputes the merged anime and
 * russian tracker lists. Must be called once at startup before getMagnetLink
 * or enrichStreamSources are used, otherwise the lists stay empty.
 */
export async function initBestTrackers() {
  BEST_TRACKERS = await getBestTrackers();
  ALL_ANIME_TRACKERS = unique(BEST_TRACKERS.concat(DEFAULT_TRACKERS).concat(ANIME_TRACKERS));
  ALL_RUSSIAN_TRACKERS = unique(BEST_TRACKERS.concat(DEFAULT_TRACKERS).concat(RUSSIAN_TRACKERS));
}
/**
 * Downloads the tracker list, retrying up to `retry` additional times
 * (immediately, with no backoff delay) before giving up.
 * Entries in trackers_best.txt are separated by blank lines, hence the
 * '\n\n' split.
 */
async function getBestTrackers(retry = 2) {
  const options = { timeout: 30000, headers: { 'User-Agent': getRandomUserAgent() } };
  return axios.get(TRACKERS_URL, options)
      .then(response => response?.data?.trim()?.split('\n\n') || [])
      .catch(error => {
        if (retry === 0) {
          console.log(`Failed retrieving best trackers: ${error.message}`);
          throw error;
        }
        return getBestTrackers(retry - 1);
      });
}
/**
 * Builds Stremio stream "sources" hints: one `tracker:` entry per announce
 * url plus a final `dht:` entry for the info hash.
 * @param {string|string[]} trackersInput - comma separated string or array
 * @param {string} infoHash - torrent info hash for the DHT entry
 * @returns {string[]|null} null when no trackers were provided
 */
export function getSources(trackersInput, infoHash) {
  if (!trackersInput) {
    return null;
  }
  const trackerList = Array.isArray(trackersInput)
      ? trackersInput
      : trackersInput.split(',');
  const sources = trackerList.map(tracker => `tracker:${tracker}`);
  sources.push(`dht:${infoHash}`);
  return sources;
}
/**
 * Replaces/extends a stream's sources based on its provider (extracted from
 * the stream title): anime providers get the full anime tracker list, russian
 * providers get the russian list merged with any existing sources.
 * All other streams are returned unchanged.
 */
export function enrichStreamSources(stream) {
  const provider = extractProvider(stream.title);
  if (ANIME_PROVIDERS.includes(provider)) {
    // Note: anime streams have their sources replaced, not merged.
    const sources = getSources(ALL_ANIME_TRACKERS, stream.infoHash);
    return { ...stream, sources };
  }
  if (RUSSIAN_PROVIDERS.includes(provider)) {
    const sources = unique([].concat(stream.sources || []).concat(getSources(ALL_RUSSIAN_TRACKERS, stream.infoHash)));
    return { ...stream, sources };
  }
  return stream;
}
// Returns a new array with duplicates removed, preserving first-seen order.
function unique(array) {
  const seen = new Set(array);
  return [...seen];
}

View File

@@ -0,0 +1,69 @@
import { MochOptions } from '../moch/moch.js';
import { showDebridCatalog } from '../moch/options.js';
import { Type } from './types.js';
// Debrid providers that expose a "my files" catalog.
const CatalogMochs = Object.values(MochOptions).filter(moch => moch.catalog);
/**
 * Builds the Stremio addon manifest for a given user configuration.
 * Name, catalogs and resources vary with the configured debrid providers.
 */
export function manifest(config = {}) {
  return {
    id: 'com.stremio.selfhostio.selfhostio',
    version: '0.0.1',
    name: getName(config),
    description: getDescription(config),
    catalogs: getCatalogs(config),
    resources: getResources(config),
    types: [Type.MOVIE, Type.SERIES, Type.ANIME, Type.OTHER],
    behaviorHints: {
      configurable: true,
      // Addon works without configuration; config only adds debrid features.
      configurationRequired: false,
    }
  };
}
/**
 * Placeholder manifest (default config) with a dummy catalog and both
 * stream+meta resources enabled; used to initialise the addon builder before
 * a per-user configuration is known.
 */
export function dummyManifest() {
  const manifestDefault = manifest();
  manifestDefault.catalogs = [{ id: 'dummy', type: Type.OTHER }];
  manifestDefault.resources = ['stream', 'meta'];
  return manifestDefault;
}
/**
 * Addon display name: "selfhostio" plus a "/"-separated suffix listing the
 * short names of the configured debrid providers (omitted when none).
 */
function getName(config) {
  const rootName = 'selfhostio';
  const enabledShortNames = Object.values(MochOptions)
      .filter(moch => config[moch.key])
      .map(moch => moch.shortName);
  const mochSuffix = enabledShortNames.join('/');
  return [rootName, mochSuffix].filter(part => part).join(' ');
}
// Static addon description; `config` is accepted for signature parity with
// getName/getCatalogs but is not used.
function getDescription(config) {
  const description = 'Selfhostio the Torrentio brings you much Funio';
  return description;
}
/**
 * Debrid "my files" catalogs for the configured providers.
 * Returns [] unless the user enabled the debrid catalog view.
 * Fix: showDebridCatalog(config) is loop-invariant — it was previously
 * re-evaluated inside the filter for every moch; now checked once up front.
 */
function getCatalogs(config) {
  if (!showDebridCatalog(config)) {
    return [];
  }
  return CatalogMochs
      .filter(moch => config[moch.key])
      .map(moch => ({
        id: `selfhostio-${moch.key}`,
        name: `${moch.name}`,
        type: 'other',
        extra: [{ name: 'skip' }],
      }));
}
/**
 * Addon resources: always the stream resource; additionally the meta resource
 * when the debrid catalog is shown and at least one catalog moch is enabled.
 * Fix: the enabled-moch filter was computed twice; now computed once.
 */
function getResources(config) {
  const streamResource = {
    name: 'stream',
    types: [Type.MOVIE, Type.SERIES],
    idPrefixes: ['tt', 'kitsu']
  };
  const enabledCatalogMochs = CatalogMochs.filter(moch => config[moch.key]);
  const metaResource = {
    name: 'meta',
    types: [Type.OTHER],
    idPrefixes: enabledCatalogMochs.map(moch => moch.key)
  };
  if (showDebridCatalog(config) && enabledCatalogMochs.length) {
    return [streamResource, metaResource];
  }
  return [streamResource];
}

View File

@@ -0,0 +1,20 @@
/**
 * Resolves (with undefined) after the given number of milliseconds.
 * Never rejects.
 * @param {number} duration - wait time in ms
 * @returns {Promise<void>}
 */
export async function delay(duration) {
  await new Promise((resolve) => {
    setTimeout(resolve, duration);
  });
}
/**
 * Races `promise` against a timer: settles with the promise's outcome when it
 * finishes within `timeoutMs`, otherwise rejects with `message`.
 * Fix: the timer is now cleared once the race settles, so a promise that
 * resolves quickly no longer leaves a stray timeout keeping the event loop
 * (and the rejection callback) alive for the full duration.
 * @param {number} timeoutMs - time budget in ms
 * @param {Promise} promise - the operation to bound
 * @param {string} message - rejection value on timeout (kept as a plain
 *     string for backwards compatibility with existing callers)
 */
export async function timeout(timeoutMs, promise, message = 'Timed out') {
  let timer;
  const timerPromise = new Promise(function (resolve, reject) {
    timer = setTimeout(function () {
      reject(message);
    }, timeoutMs);
  });
  return Promise.race([promise, timerPromise])
      .finally(() => clearTimeout(timer));
}

View File

@@ -0,0 +1,131 @@
import { Sequelize } from 'sequelize';
const Op = Sequelize.Op;
// Defaults to a local postgres instance; override via DATABASE_URI.
const DATABASE_URI = process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/postgres';
const database = new Sequelize(DATABASE_URI, { logging: false });
// One row per ingested torrent, keyed by info hash.
const Torrent = database.define('torrent',
  {
    infoHash: { type: Sequelize.STRING(64), primaryKey: true },
    provider: { type: Sequelize.STRING(32), allowNull: false },
    torrentId: { type: Sequelize.STRING(128) },
    title: { type: Sequelize.STRING(256), allowNull: false },
    size: { type: Sequelize.BIGINT },
    type: { type: Sequelize.STRING(16), allowNull: false },
    uploadDate: { type: Sequelize.DATE, allowNull: false },
    seeders: { type: Sequelize.SMALLINT },
    // Comma-separated announce urls.
    trackers: { type: Sequelize.STRING(4096) },
    languages: { type: Sequelize.STRING(4096) },
    resolution: { type: Sequelize.STRING(16) }
  }
);
// One row per video file inside a torrent, matched to imdb/kitsu ids.
const File = database.define('file',
  {
    id: { type: Sequelize.BIGINT, autoIncrement: true, primaryKey: true },
    infoHash: {
      type: Sequelize.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: { type: Sequelize.INTEGER },
    title: { type: Sequelize.STRING(256), allowNull: false },
    size: { type: Sequelize.BIGINT },
    imdbId: { type: Sequelize.STRING(32) },
    imdbSeason: { type: Sequelize.INTEGER },
    imdbEpisode: { type: Sequelize.INTEGER },
    kitsuId: { type: Sequelize.INTEGER },
    kitsuEpisode: { type: Sequelize.INTEGER }
  },
);
// Subtitle files found inside torrents; no explicit primary key is declared
// and timestamps are disabled for this table.
const Subtitle = database.define('subtitle',
  {
    infoHash: {
      type: Sequelize.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: { type: Sequelize.INTEGER, allowNull: false },
    // Nullable link to the video file this subtitle belongs to.
    fileId: {
      type: Sequelize.BIGINT,
      allowNull: true,
      references: { model: File, key: 'id' },
      onDelete: 'SET NULL'
    },
    title: { type: Sequelize.STRING(512), allowNull: false },
    size: { type: Sequelize.BIGINT, allowNull: false },
  },
  { timestamps: false }
);
// Associations are declared with constraints: false, so no db-level foreign
// key constraints are created by these definitions.
Torrent.hasMany(File, { foreignKey: 'infoHash', constraints: false });
File.belongsTo(Torrent, { foreignKey: 'infoHash', constraints: false });
File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false });
Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false });
// Looks up a single torrent row by info hash (null when absent).
export function getTorrent(infoHash) {
  return Torrent.findOne({ where: { infoHash: infoHash } });
}
// Fetches all file rows belonging to any of the given info hashes.
export function getFiles(infoHashes) {
  return File.findAll({ where: { infoHash: { [Op.in]: infoHashes} } });
}
// Shared query shape for all stream lookups: eager-load the owning torrent,
// cap at 500 rows, order by torrent size descending (largest first).
// Extracted to remove the fourfold duplication across the getters below.
function findFileEntries(where) {
  return File.findAll({
    where,
    include: [Torrent],
    limit: 500,
    order: [
      [Torrent, 'size', 'DESC']
    ]
  });
}
// Movie files matched by imdb id.
export function getImdbIdMovieEntries(imdbId) {
  return findFileEntries({ imdbId: { [Op.eq]: imdbId } });
}
// Series episode files matched by imdb id + season + episode.
export function getImdbIdSeriesEntries(imdbId, season, episode) {
  return findFileEntries({
    imdbId: { [Op.eq]: imdbId },
    imdbSeason: { [Op.eq]: season },
    imdbEpisode: { [Op.eq]: episode }
  });
}
// Anime movie files matched by kitsu id.
export function getKitsuIdMovieEntries(kitsuId) {
  return findFileEntries({ kitsuId: { [Op.eq]: kitsuId } });
}
// Anime episode files matched by kitsu id + episode number.
export function getKitsuIdSeriesEntries(kitsuId, episode) {
  return findFileEntries({
    kitsuId: { [Op.eq]: kitsuId },
    kitsuEpisode: { [Op.eq]: episode }
  });
}

View File

@@ -0,0 +1,6 @@
import UserAgent from 'user-agents';
// Shared generator instance; random() derives a fresh randomized agent each call.
const userAgent = new UserAgent();
// Returns a random browser User-Agent string for outbound scraping requests.
export function getRandomUserAgent() {
  return userAgent.random().toString();
}

View File

@@ -0,0 +1,50 @@
import { containsLanguage, LanguageOptions } from './languages.js';
import { extractSize } from './titleHelper.js';
// Sort modes offered on the configuration page.
// NOTE(review): the current implementation below (_sortStreams/sortBySize)
// always sorts by size regardless of the selected option — these entries
// only drive the UI right now.
export const SortOptions = {
  key: 'sort',
  options: {
    qualitySeeders: {
      key: 'quality',
      description: 'By quality then seeders'
    },
    qualitySize: {
      key: 'qualitysize',
      description: 'By quality then size'
    },
    seeders: {
      key: 'seeders',
      description: 'By seeders'
    },
    size: {
      key: 'size',
      description: 'By size'
    },
  }
}
/**
 * Sorts streams for display. When language filters are configured (other than
 * english alone), streams containing a requested language are sorted first,
 * followed by the remaining streams — each group sorted independently.
 */
export default function sortStreams(streams, config) {
  const languages = config[LanguageOptions.key];
  if (languages?.length && languages[0] !== 'english') {
    // No need to filter english since it's hard to predict which entries are english
    const streamsWithLanguage = streams.filter(stream => containsLanguage(stream, languages));
    const streamsNoLanguage = streams.filter(stream => !streamsWithLanguage.includes(stream));
    return _sortStreams(streamsWithLanguage, config).concat(_sortStreams(streamsNoLanguage, config));
  }
  return _sortStreams(streams, config);
}
// Sorts one group by size, truncating to config.limit when it is a positive
// integer string (invalid/absent limit means no truncation).
function _sortStreams(streams, config) {
  const limit = /^[1-9][0-9]*$/.test(config.limit) && parseInt(config.limit) || undefined;
  return sortBySize(streams, limit);
}
/**
 * Sorts streams by the size embedded in their title (largest first) and
 * truncates to `limit` entries (all entries when limit is undefined).
 * Fixes: the previous version (a) mutated the caller's array via in-place
 * .sort, and (b) re-parsed both titles on every comparison; sizes are now
 * parsed once per stream and a copy is sorted (decorate-sort-undecorate).
 */
function sortBySize(streams, limit) {
  return streams
      .map(stream => ({ stream, size: extractSize(stream.title) }))
      .sort((a, b) => b.size - a.size)
      .map(({ stream }) => stream)
      .slice(0, limit);
}

View File

@@ -0,0 +1,148 @@
import titleParser from 'parse-torrent-title';
import { Type } from './types.js';
import { mapLanguages } from './languages.js';
import { enrichStreamSources, getSources } from './magnetHelper.js';
import { getSubtitles } from './subtitles.js';
const ADDON_NAME = 'selfhostio';
// Relative tolerance when deciding whether a file's size ≈ the torrent's size.
const SIZE_DELTA = 0.02;
// Placeholder byte value apparently used when the real size is unknown —
// TODO confirm against the ingestion producer.
const UNKNOWN_SIZE = 300000000;
// Sources that mark a low-quality release regardless of resolution.
const CAM_SOURCES = ['CAM', 'TeleSync', 'TeleCine', 'SCR'];
/**
 * Converts a File+Torrent database record into a Stremio stream object:
 * a multi-line title (torrent name, file name, size, languages), a name with
 * addon + quality, binge-group behavior hints, tracker sources and subtitles.
 */
export function toStreamInfo(record) {
  const torrentInfo = titleParser.parse(record.torrent.title);
  const fileInfo = titleParser.parse(record.title);
  // "Same info": the file effectively is the whole torrent, so repeating the
  // file name as a second title row would be redundant.
  const sameInfo = !Number.isInteger(record.fileIndex)
    || Math.abs(record.size / record.torrent.size - 1) < SIZE_DELTA
    || record.title.includes(record.torrent.title);
  const quality = getQuality(record, torrentInfo, fileInfo);
  const hdrProfiles = torrentInfo.hdr || fileInfo.hdr || []
  const title = joinDetailParts(
    [
      joinDetailParts([record.torrent.title.replace(/[, ]+/g, ' ')]),
      joinDetailParts([!sameInfo && record.title || undefined]),
      joinDetailParts([
        joinDetailParts([formatSize(record.size)], '💾 ')
      ]),
      joinDetailParts(getLanguages(record, torrentInfo, fileInfo), '', ' / '),
    ],
    '',
    '\n'
  );
  const name = joinDetailParts(
    [
      joinDetailParts([ADDON_NAME]),
      joinDetailParts([quality, joinDetailParts(hdrProfiles, '', ' | ')])
    ],
    '',
    '\n'
  );
  const bingeGroupParts = getBingeGroupParts(record, sameInfo, quality, torrentInfo, fileInfo);
  const bingeGroup = joinDetailParts(bingeGroupParts, "selfhostio|", "|")
  const behaviorHints = bingeGroup ? { bingeGroup } : undefined;
  // cleanOutputObject drops null/undefined fields from the final stream.
  return cleanOutputObject({
    name: name,
    title: title,
    infoHash: record.infoHash,
    fileIdx: record.fileIndex,
    behaviorHints: behaviorHints,
    sources: getSources(record.torrent.trackers, record.infoHash),
    subtitles: getSubtitles(record)
  });
}
/**
 * Derives the displayed quality: CAM-grade sources take precedence (file info
 * before torrent info), otherwise resolution (file, torrent, then the stored
 * torrent resolution), falling back to the parsed source.
 */
function getQuality(record, torrentInfo, fileInfo) {
  const camSource = [fileInfo.source, torrentInfo.source]
      .find(source => CAM_SOURCES.includes(source));
  if (camSource) {
    return camSource;
  }
  const resolution = fileInfo.resolution || torrentInfo.resolution || record.torrent.resolution;
  return resolution || fileInfo.source || torrentInfo.source;
}
/**
 * Collects display languages from the stored provider languages and both
 * parsed titles, with anime- and english-specific reductions, then maps them
 * to flags via mapLanguages.
 */
function getLanguages(record, torrentInfo, fileInfo) {
  // The stored languages string is parsed via a fake '.srt' filename so the
  // title parser treats it as a parsable title.
  const providerLanguages = record.torrent.languages && titleParser.parse(record.torrent.languages + '.srt').languages || [];
  const torrentLanguages = torrentInfo.languages || [];
  const fileLanguages = fileInfo.languages || [];
  const dubbed = torrentInfo.dubbed || fileInfo.dubbed;
  let languages = Array.from(new Set([].concat(torrentLanguages).concat(fileLanguages).concat(providerLanguages)));
  if (record.kitsuId || record.torrent.type === Type.ANIME) {
    // no need to display japanese for anime
    languages = languages.concat(dubbed ? ['dubbed'] : [])
        .filter(lang => lang !== 'japanese');
  }
  if (languages.length === 1 && languages.includes('english')) {
    // no need to display languages if only english is present
    languages = [];
  }
  if (languages.length === 0 && dubbed) {
    // display dubbed only if there are no other languages defined for non anime
    languages = ['dubbed'];
  }
  return mapLanguages(languages);
}
/**
 * Joins the defined parts with the delimiter and prepends the prefix.
 * Returns undefined (not '') when nothing remains, so callers can drop empty
 * detail rows entirely.
 */
function joinDetailParts(parts, prefix = '', delimiter = ' ') {
  const present = parts.filter(part => part !== undefined && part !== null);
  const joined = present.join(delimiter);
  if (joined.length === 0) {
    return undefined;
  }
  return `${prefix}${joined}`;
}
/**
 * Formats a byte count as a human readable string ("1.4 GB").
 * Returns undefined when the size is missing/zero or equals the ingester's
 * UNKNOWN_SIZE placeholder, so the size row is omitted from titles.
 * Fix: removed the unreachable `size === 0` ternary branch — zero is already
 * rejected by the `!size` guard above it.
 */
function formatSize(size) {
  if (!size || size === UNKNOWN_SIZE) {
    return undefined;
  }
  const exponent = Math.floor(Math.log(size) / Math.log(1024));
  const value = Number((size / Math.pow(1024, exponent)).toFixed(2));
  return value + ' ' + ['B', 'kB', 'MB', 'GB', 'TB'][exponent];
}
// Applies per-request static enrichment (tracker sources + subtitle urls)
// to every stream in the list.
export function applyStaticInfo(streams) {
  return streams.map(stream => enrichStaticInfo(stream));
}
// Works on a shallow copy so the cached stream objects are not mutated.
function enrichStaticInfo(stream) {
  return enrichSubtitles(enrichStreamSources({ ...stream }));
}
/**
 * Rewrites subtitle entries that lack a direct url to point at the local
 * Stremio streaming server (localhost:11470), keyed by info hash, file index
 * and the subtitle's base file name. Entries with a url are kept as-is.
 */
function enrichSubtitles(stream) {
  const subtitles = stream.subtitles;
  if (!subtitles?.length) {
    return stream;
  }
  stream.subtitles = subtitles.map(subtitle => {
    if (subtitle.url) {
      return subtitle;
    }
    const fileName = subtitle.title.split('/').pop();
    return {
      id: `${subtitle.fileIndex}`,
      lang: subtitle.lang,
      url: `http://localhost:11470/${subtitle.infoHash}/${subtitle.fileIndex}/${fileName}`
    };
  });
  return stream;
}
/**
 * Parts for the bingeGroup behavior hint, which lets Stremio auto-pick the
 * "same" release for the next episode. Movies group by quality/source/codec
 * profile; series with redundant file info group by quality/hdr/release
 * group; otherwise each torrent is its own group (info hash).
 */
function getBingeGroupParts(record, sameInfo, quality, torrentInfo, fileInfo) {
  if (record.torrent.type === Type.MOVIE) {
    const source = torrentInfo.source || fileInfo.source
    return [quality]
        .concat(source !== quality ? source : [])
        .concat(torrentInfo.codec || fileInfo.codec)
        .concat(torrentInfo.bitDepth || fileInfo.bitDepth)
        .concat(torrentInfo.hdr || fileInfo.hdr);
  } else if (sameInfo) {
    return [quality]
        .concat(fileInfo.hdr)
        .concat(fileInfo.group);
  }
  return [record.infoHash];
}
// Returns a copy of the object without keys whose value is null or undefined
// (the loose `!= null` comparison catches both).
function cleanOutputObject(object) {
  const entries = Object.entries(object);
  const kept = entries.filter(([, value]) => value != null);
  return Object.fromEntries(kept);
}

View File

@@ -0,0 +1,99 @@
import { parse } from 'parse-torrent-title';
import { isExtension } from './extension.js';
import { Providers } from './filter.js';
import { languageFromCode } from './languages.js';
// Maps parsed language names to 3-letter subtitle codes (mostly ISO 639-2/B
// style, with 'zht'/'lat' as custom variants). NOTE(review): 'malay' exists
// in the display mapping (languages.js) but is absent here — confirm whether
// that is intentional.
// Fix: 'hindi' previously mapped to 'hin ' with a trailing space, producing
// a malformed subtitle language code.
const languageMapping = {
  'english': 'eng',
  'japanese': 'jpn',
  'russian': 'rus',
  'italian': 'ita',
  'portuguese': 'por',
  'spanish': 'spa',
  'latino': 'lat',
  'korean': 'kor',
  'chinese': 'zho',
  'taiwanese': 'zht',
  'french': 'fre',
  'german': 'ger',
  'dutch': 'dut',
  'hindi': 'hin',
  'telugu': 'tel',
  'tamil': 'tam',
  'polish': 'pol',
  'lithuanian': 'lit',
  'latvian': 'lav',
  'estonian': 'est',
  'czech': 'cze',
  'slovakian': 'slo',
  'slovenian': 'slv',
  'hungarian': 'hun',
  'romanian': 'rum',
  'bulgarian': 'bul',
  'serbian': 'scc',
  'croatian': 'hrv',
  'ukrainian': 'ukr',
  'greek': 'ell',
  'danish': 'dan',
  'finnish': 'fin',
  'swedish': 'swe',
  'norwegian': 'nor',
  'turkish': 'tur',
  'arabic': 'ara',
  'persian': 'per',
  'hebrew': 'heb',
  'vietnamese': 'vie',
  'indonesian': 'ind',
  'thai': 'tha'
}
// Parsed "language" tags that are not actual languages and must be ignored.
const ignoreSet = new Set(['dubbed', 'multi audio', 'multi subs', 'dual audio']);
// Subtitle file extensions served to the player.
const allowedExtensions = ['srt', 'vtt', 'ass', 'ssa'];
/**
 * Builds the subtitle list for a stream record: keeps only files with an
 * allowed subtitle extension, sorted largest first, each tagged with a
 * best-effort language parsed from its path.
 * Returns null when the record carries no subtitles.
 */
export function getSubtitles(record) {
  if (!record?.subtitles?.length) {
    return null;
  }
  return record.subtitles
      .filter(subtitle => isExtension(subtitle.title, allowedExtensions))
      .sort((a, b) => b.size - a.size)
      .map(subtitle => ({
        infoHash: subtitle.infoHash,
        fileIndex: subtitle.fileIndex,
        title: subtitle.title,
        lang: parseLanguage(subtitle.title, record),
      }));
}
/**
 * Best-effort subtitle language detection, tried in order:
 * 1. a single language parsed from the subtitle file name (minus the video
 *    file name, to avoid picking up languages from the release title);
 * 2. subtitle named exactly like the video → assume the provider's language
 *    (or english);
 * 3. a single language parsed from the containing folder name;
 * 4. a 2-3 letter code embedded in the file name; otherwise 'Unknown'.
 */
function parseLanguage(title, record) {
  const subtitlePathParts = title.split('/');
  const subtitleFileName = subtitlePathParts.pop();
  const subtitleTitleNoExt = title.replace(/\.\w{2,5}$/, '');
  const videoFileName = record.title.split('/').pop().replace(/\.\w{2,5}$/, '');
  const fileNameLanguage = getSingleLanguage(subtitleFileName.replace(videoFileName, ''));
  if (fileNameLanguage) {
    return fileNameLanguage;
  }
  const videoTitleNoExt = record.title.replace(/\.\w{2,5}$/, '');
  if (subtitleTitleNoExt === record.title || subtitleTitleNoExt === videoTitleNoExt) {
    // Same name as the video: use the provider's foreign flag when it has one.
    const provider = Providers.options.find(provider => provider.label === record.torrent.provider);
    return provider?.foreign && languageFromCode(provider.foreign) || 'eng';
  }
  const folderName = subtitlePathParts.join('/');
  const folderNameLanguage = getSingleLanguage(folderName.replace(videoFileName, ''));
  if (folderNameLanguage) {
    return folderNameLanguage
  }
  return getFileNameLanguageCode(subtitleFileName) || 'Unknown';
}
// Parses the text for languages (ignoring meta tags like 'dubbed') and
// returns the mapped code only when exactly one real language was found —
// multiple or zero matches are treated as inconclusive.
function getSingleLanguage(title) {
  const parsedInfo = parse(title);
  const languages = (parsedInfo.languages || []).filter(language => !ignoreSet.has(language));
  return languages.length === 1 ? languageMapping[languages[0]] : undefined;
}
/**
 * Extracts a 2-3 letter language code placed just before the file extension,
 * either as a dotted/space suffix ("movie.en.srt" → "en") or in brackets
 * ("movie[eng].srt" → "eng").
 * Fix: the regex has two capture groups — group 1 for the suffix form and
 * group 2 for the bracketed form — but only group 1 was returned, so
 * bracketed codes were never detected.
 */
function getFileNameLanguageCode(fileName) {
  const match = fileName.match(/(?:(?:^|[._ ])([A-Za-z][a-z]{1,2})|\[([a-z]{2,3})])\.\w{3,4}$/);
  return (match?.[1] ?? match?.[2])?.toLowerCase();
}

View File

@@ -0,0 +1,31 @@
// Stream titles embed the seeder count as "👤 <n>"; a missing marker means 0.
export function extractSeeders(title) {
  const match = title.match(/👤 (\d+)/);
  if (!match) {
    return 0;
  }
  return Number.parseInt(match[1], 10) || 0;
}
// Stream titles embed the size as "💾 <number> <unit>"; a missing marker or
// unparsable value means 0 bytes.
export function extractSize(title) {
  const sizeMatch = title.match(/💾 ([\d.]+ \w+)/);
  if (!sizeMatch) {
    return 0;
  }
  return parseSize(sizeMatch[1]) || 0;
}
// The provider name follows the gear emoji in stream titles ("⚙️ rarbg");
// lowercased for case-insensitive comparisons, undefined when absent.
export function extractProvider(title) {
  const providerMatch = title.match(/⚙.* ([^ \n]+)/);
  return providerMatch ? providerMatch[1].toLowerCase() : undefined;
}
/**
 * Converts a human readable size string ("1.5 GB") to bytes, stripping
 * thousands separators. Unrecognized units fall through as plain bytes;
 * empty/missing input yields 0.
 */
export function parseSize(sizeText) {
  if (!sizeText) {
    return 0;
  }
  const unitScales = [
    ['TB', 1024 * 1024 * 1024 * 1024],
    ['GB', 1024 * 1024 * 1024],
    ['MB', 1024 * 1024],
    ['kB', 1024],
  ];
  const matched = unitScales.find(([unit]) => sizeText.includes(unit));
  const scale = matched ? matched[1] : 1;
  return Math.floor(parseFloat(sizeText.replace(/,/g, '')) * scale);
}

View File

@@ -0,0 +1,6 @@
// Content type identifiers shared across the addon (manifest, sorting,
// stream building). Frozen so the shared constant cannot be mutated at
// runtime by any importer.
export const Type = Object.freeze({
  MOVIE: 'movie',
  SERIES: 'series',
  ANIME: 'anime',
  OTHER: 'other'
});

View File

@@ -0,0 +1,195 @@
import AllDebridClient from 'all-debrid-api';
import { Type } from '../lib/types.js';
import { isVideo, isArchive } from '../lib/extension.js';
import StaticResponse from './static.js';
import { getMagnetLink } from '../lib/magnetHelper.js';
import { BadTokenError, AccessDeniedError, sameFilename } from './mochHelper.js';
const KEY = 'alldebrid';
// Agent name reported to the AllDebrid API for this addon.
const AGENT = 'selfhostio';

/**
 * Checks which of the given streams are instantly available on AllDebrid and
 * maps each infoHash to a moch resolve url + cached flag.
 * Availability failures other than the common auth errors degrade to a
 * falsy result so stream listing still proceeds (uncached).
 */
export async function getCachedStreams(streams, apiKey) {
  const options = await getDefaultOptions();
  const AD = new AllDebridClient(apiKey, options);
  const hashes = streams.map(stream => stream.infoHash);
  const available = await AD.magnet.instant(hashes)
      .catch(error => {
        if (toCommonError(error)) {
          return Promise.reject(error);
        }
        console.warn(`Failed AllDebrid cached [${hashes[0]}] torrent availability request:`, error);
        return undefined;
      });
  return available?.data?.magnets && streams
      .reduce((mochStreams, stream) => {
        const cachedEntry = available.data.magnets.find(magnet => stream.infoHash === magnet.hash.toLowerCase());
        // Strip the "👤 …" details row from the title; the last remaining
        // line is the file name.
        const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
        const fileName = streamTitleParts[streamTitleParts.length - 1];
        // Two remaining lines appear to indicate a file-specific stream —
        // TODO confirm against toStreamInfo's title layout.
        const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
        const encodedFileName = encodeURIComponent(fileName);
        mochStreams[stream.infoHash] = {
          url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
          cached: cachedEntry?.instant
        }
        return mochStreams;
      }, {})
}
/**
 * Lists the user's ready AllDebrid magnets as catalog entries.
 * The API returns the full list in one call, so any non-zero offset
 * (pagination) yields an empty page.
 */
export async function getCatalog(apiKey, offset = 0) {
  if (offset > 0) {
    return [];
  }
  const options = await getDefaultOptions();
  const AD = new AllDebridClient(apiKey, options);
  return AD.magnet.status()
      .then(response => response.data.magnets)
      .then(torrents => (torrents || [])
          .filter(torrent => torrent && statusReady(torrent.statusCode))
          .map(torrent => ({
            id: `${KEY}:${torrent.id}`,
            type: Type.OTHER,
            name: torrent.filename
          })));
}
/**
 * Builds the meta object for one AllDebrid magnet: one video entry per
 * contained video file, each with a moch resolve url.
 */
export async function getItemMeta(itemId, apiKey) {
  const options = await getDefaultOptions();
  const AD = new AllDebridClient(apiKey, options);
  return AD.magnet.status(itemId)
      .then(response => response.data.magnets)
      .then(torrent => ({
        id: `${KEY}:${torrent.id}`,
        type: Type.OTHER,
        name: torrent.filename,
        infoHash: torrent.hash.toLowerCase(),
        videos: torrent.links
            .filter(file => isVideo(file.filename))
            .map((file, index) => ({
              id: `${KEY}:${torrent.id}:${index}`,
              title: file.filename,
              // Subtract the index so each video gets a distinct timestamp
              // and keeps its original ordering when sorted by release date.
              released: new Date(torrent.uploadDate * 1000 - index).toISOString(),
              streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/${encodeURIComponent(file.filename)}/${index}` }]
            }))
      }))
}
/**
 * Resolves an AllDebrid stream to a direct download url.
 * Returns a StaticResponse placeholder when access is denied; when the
 * account hit its magnet limit, deletes one magnet and retries once.
 */
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
  console.log(`Unrestricting AllDebrid ${infoHash} [${fileIndex}]`);
  const options = await getDefaultOptions(ip);
  const AD = new AllDebridClient(apiKey, options);
  return _resolve(AD, infoHash, cachedEntryInfo, fileIndex)
      .catch(error => {
        if (errorExpiredSubscriptionError(error)) {
          console.log(`Access denied to AllDebrid ${infoHash} [${fileIndex}]`);
          return StaticResponse.FAILED_ACCESS;
        } else if (error.code === 'MAGNET_TOO_MANY') {
          console.log(`Deleting and retrying adding to AllDebrid ${infoHash} [${fileIndex}]...`);
          return _deleteAndRetry(AD, infoHash, cachedEntryInfo, fileIndex);
        }
        return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(error)}`);
      });
}
// Unrestricts when the magnet is ready, reports DOWNLOADING while in
// progress, re-creates the magnet for retryable error states, else rejects.
async function _resolve(AD, infoHash, cachedEntryInfo, fileIndex) {
  const torrent = await _createOrFindTorrent(AD, infoHash);
  if (torrent && statusReady(torrent.statusCode)) {
    return _unrestrictLink(AD, torrent, cachedEntryInfo, fileIndex);
  } else if (torrent && statusDownloading(torrent.statusCode)) {
    console.log(`Downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
    return StaticResponse.DOWNLOADING;
  } else if (torrent && statusHandledError(torrent.statusCode)) {
    console.log(`Retrying downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
    return _retryCreateTorrent(AD, infoHash, cachedEntryInfo, fileIndex);
  }
  return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(torrent)}`);
}
// Reuses an existing magnet for the hash when present, otherwise uploads it.
async function _createOrFindTorrent(AD, infoHash) {
  return _findTorrent(AD, infoHash)
      .catch(() => _createTorrent(AD, infoHash));
}
// Re-uploads the magnet and unrestricts immediately if it comes back ready.
async function _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex) {
  const newTorrent = await _createTorrent(AD, infoHash);
  return newTorrent && statusReady(newTorrent.statusCode)
      ? _unrestrictLink(AD, newTorrent, encodedFileName, fileIndex)
      : StaticResponse.FAILED_DOWNLOAD;
}
// Frees a magnet slot by deleting the last entry returned by magnet.status,
// then retries the upload. NOTE(review): assumes the last listed magnet is
// the safest to evict — confirm the API's list ordering.
async function _deleteAndRetry(AD, infoHash, encodedFileName, fileIndex) {
  const torrents = await AD.magnet.status().then(response => response.data.magnets);
  const lastTorrent = torrents[torrents.length - 1];
  return AD.magnet.delete(lastTorrent.id)
      .then(() => _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex));
}
// Finds an existing magnet for the hash, preferring one not in an error state.
async function _findTorrent(AD, infoHash) {
  const torrents = await AD.magnet.status().then(response => response.data.magnets);
  const foundTorrents = torrents.filter(torrent => torrent.hash.toLowerCase() === infoHash);
  const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
  const foundTorrent = nonFailedTorrent || foundTorrents[0];
  return foundTorrent || Promise.reject('No recent torrent found');
}
// Uploads the magnet and fetches its fresh status entry.
async function _createTorrent(AD, infoHash) {
  const magnetLink = await getMagnetLink(infoHash);
  const uploadResponse = await AD.magnet.upload(magnetLink);
  const torrentId = uploadResponse.data.magnets[0].id;
  return AD.magnet.status(torrentId).then(statusResponse => statusResponse.data.magnets);
}
/**
 * Picks the target video link — by file name when a fileIndex is given,
 * otherwise the largest video — and unlocks it into a direct url.
 * Returns FAILED_RAR when only archives are present.
 */
async function _unrestrictLink(AD, torrent, encodedFileName, fileIndex) {
  const targetFileName = decodeURIComponent(encodedFileName);
  const videos = torrent.links.filter(link => isVideo(link.filename));
  const targetVideo = Number.isInteger(fileIndex)
      ? videos.find(video => sameFilename(targetFileName, video.filename))
      : videos.sort((a, b) => b.size - a.size)[0];
  if (!targetVideo && torrent.links.every(link => isArchive(link.filename))) {
    console.log(`Only AllDebrid archive is available for [${torrent.hash}] ${encodedFileName}`)
    return StaticResponse.FAILED_RAR;
  }
  if (!targetVideo || !targetVideo.link || !targetVideo.link.length) {
    return Promise.reject(`No AllDebrid links found for [${torrent.hash}] ${encodedFileName}`);
  }
  const unrestrictedLink = await AD.link.unlock(targetVideo.link).then(response => response.data.link);
  console.log(`Unrestricted AllDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
  return unrestrictedLink;
}
// `ip` is accepted for signature parity with the other debrid modules but is
// not forwarded to AllDebrid.
async function getDefaultOptions(ip) {
  return { base_agent: AGENT, timeout: 10000 };
}
/**
 * Maps AllDebrid-specific auth failures onto the shared moch error types;
 * anything else returns undefined and stays provider-specific.
 */
export function toCommonError(error) {
  switch (error && error.code) {
    case 'AUTH_BAD_APIKEY':
      return BadTokenError;
    case 'AUTH_USER_BANNED':
      return AccessDeniedError;
    default:
      return undefined;
  }
}
// AllDebrid magnet status code groups: 0-3 queued/downloading, 4 ready,
// 5-11 error states (of which 5,7,9,10,11 are retried by re-uploading).
const ERROR_STATUS_CODES = new Set([5, 6, 7, 8, 9, 10, 11]);
const RETRYABLE_STATUS_CODES = new Set([5, 7, 9, 10, 11]);
const IN_PROGRESS_STATUS_CODES = new Set([0, 1, 2, 3]);
const READY_STATUS_CODE = 4;

function statusError(statusCode) {
  return ERROR_STATUS_CODES.has(statusCode);
}
function statusHandledError(statusCode) {
  return RETRYABLE_STATUS_CODES.has(statusCode);
}
function statusDownloading(statusCode) {
  return IN_PROGRESS_STATUS_CODES.has(statusCode);
}
function statusReady(statusCode) {
  return statusCode === READY_STATUS_CODE;
}
// AllDebrid error codes that indicate missing/expired premium access.
const SUBSCRIPTION_ERROR_CODES = new Set([
  'AUTH_BAD_APIKEY',
  'MUST_BE_PREMIUM',
  'MAGNET_MUST_BE_PREMIUM',
  'FREE_TRIAL_LIMIT_REACHED',
  'AUTH_USER_BANNED',
]);

function errorExpiredSubscriptionError(error) {
  return SUBSCRIPTION_ERROR_CODES.has(error.code);
}

View File

@@ -0,0 +1,155 @@
import DebridLinkClient from 'debrid-link-api';
import { Type } from '../lib/types.js';
import { isVideo, isArchive } from '../lib/extension.js';
import StaticResponse from './static.js';
import { getMagnetLink } from '../lib/magnetHelper.js';
import { chunkArray, BadTokenError } from './mochHelper.js';
const KEY = 'debridlink';
/**
 * Checks which of the given streams are cached on DebridLink and maps each
 * infoHash to a moch resolve url + cached flag.
 * Hashes are queried in batches of 50 (comma-joined) and the per-batch
 * results are merged into one hash→entry object.
 */
export async function getCachedStreams(streams, apiKey) {
  const options = await getDefaultOptions();
  const DL = new DebridLinkClient(apiKey, options);
  const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 50)
      .map(batch => batch.join(','));
  const available = await Promise.all(hashBatches.map(hashes => DL.seedbox.cached(hashes)))
      .then(results => results.map(result => result.value))
      .then(results => results.reduce((all, result) => Object.assign(all, result), {}))
      .catch(error => {
        if (toCommonError(error)) {
          return Promise.reject(error);
        }
        console.warn('Failed DebridLink cached torrent availability request:', error);
        return undefined;
      });
  return available && streams
      .reduce((mochStreams, stream) => {
        const cachedEntry = available[stream.infoHash];
        mochStreams[stream.infoHash] = {
          url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
          cached: !!cachedEntry
        };
        return mochStreams;
      }, {})
}
/**
 * Lists the user's fully-downloaded DebridLink seedbox torrents as catalog
 * entries. The API returns the full list in one call, so any non-zero offset
 * (pagination) yields an empty page.
 */
export async function getCatalog(apiKey, offset = 0) {
  if (offset > 0) {
    return [];
  }
  const options = await getDefaultOptions();
  const DL = new DebridLinkClient(apiKey, options);
  return DL.seedbox.list()
      .then(response => response.value)
      .then(torrents => (torrents || [])
          .filter(torrent => torrent && statusReady(torrent))
          .map(torrent => ({
            id: `${KEY}:${torrent.id}`,
            type: Type.OTHER,
            name: torrent.name
          })));
}
/**
 * Builds the meta object for one DebridLink torrent: one video entry per
 * contained video file, streaming via the file's direct downloadUrl.
 */
export async function getItemMeta(itemId, apiKey, ip) {
  const options = await getDefaultOptions(ip);
  const DL = new DebridLinkClient(apiKey, options);
  return DL.seedbox.list(itemId)
      .then(response => response.value[0])
      .then(torrent => ({
        id: `${KEY}:${torrent.id}`,
        type: Type.OTHER,
        name: torrent.name,
        infoHash: torrent.hashString.toLowerCase(),
        videos: torrent.files
            .filter(file => isVideo(file.name))
            .map((file, index) => ({
              id: `${KEY}:${torrent.id}:${index}`,
              title: file.name,
              // Subtract the index so each video gets a distinct timestamp
              // and keeps its original ordering when sorted by release date.
              released: new Date(torrent.created * 1000 - index).toISOString(),
              streams: [{ url: file.downloadUrl }]
            }))
      }))
}
/**
 * Resolves a DebridLink stream to a direct download url, returning the
 * FAILED_ACCESS placeholder for account/plan-limit errors.
 */
export async function resolve({ ip, apiKey, infoHash, fileIndex }) {
  console.log(`Unrestricting DebridLink ${infoHash} [${fileIndex}]`);
  const options = await getDefaultOptions(ip);
  const DL = new DebridLinkClient(apiKey, options);
  return _resolve(DL, infoHash, fileIndex)
      .catch(error => {
        if (errorExpiredSubscriptionError(error)) {
          console.log(`Access denied to DebridLink ${infoHash} [${fileIndex}]`);
          return StaticResponse.FAILED_ACCESS;
        }
        return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(error)}`);
      });
}
// Unrestricts when the torrent is at 100%, reports DOWNLOADING while in
// progress, otherwise rejects.
async function _resolve(DL, infoHash, fileIndex) {
  const torrent = await _createOrFindTorrent(DL, infoHash);
  if (torrent && statusReady(torrent)) {
    return _unrestrictLink(DL, torrent, fileIndex);
  } else if (torrent && statusDownloading(torrent)) {
    console.log(`Downloading to DebridLink ${infoHash} [${fileIndex}]...`);
    return StaticResponse.DOWNLOADING;
  }
  return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(torrent)}`);
}
// Reuses an existing seedbox torrent for the hash when present, otherwise
// adds it via magnet link.
async function _createOrFindTorrent(DL, infoHash) {
  return _findTorrent(DL, infoHash)
      .catch(() => _createTorrent(DL, infoHash));
}
// Finds an existing seedbox torrent matching the info hash.
async function _findTorrent(DL, infoHash) {
  const torrents = await DL.seedbox.list().then(response => response.value);
  const foundTorrents = torrents.filter(torrent => torrent.hashString.toLowerCase() === infoHash);
  return foundTorrents[0] || Promise.reject('No recent torrent found');
}
// Adds the magnet to the seedbox (third argument `true` is passed to
// seedbox.add — see debrid-link-api docs for its meaning).
async function _createTorrent(DL, infoHash) {
  const magnetLink = await getMagnetLink(infoHash);
  const uploadResponse = await DL.seedbox.add(magnetLink, null, true);
  return uploadResponse.value;
}
// Pick the requested file by index, or the largest fully-downloaded file
// when no index is given, and return its direct download url.
async function _unrestrictLink(DL, torrent, fileIndex) {
  let targetFile;
  if (Number.isInteger(fileIndex)) {
    targetFile = torrent.files[fileIndex];
  } else {
    const finishedFiles = torrent.files.filter(file => file.downloadPercent === 100);
    targetFile = finishedFiles.sort((a, b) => b.size - a.size)[0];
  }
  // Nothing usable and everything is an archive: signal the RAR static video.
  if (!targetFile && torrent.files.every(file => isArchive(file.downloadUrl))) {
    console.log(`Only DebridLink archive is available for [${torrent.hashString}] ${fileIndex}`)
    return StaticResponse.FAILED_RAR;
  }
  if (!targetFile || !targetFile.downloadUrl) {
    return Promise.reject(`No DebridLink links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
  }
  console.log(`Unrestricted DebridLink ${torrent.hashString} [${fileIndex}] to ${targetFile.downloadUrl}`);
  return targetFile.downloadUrl;
}
// Client options: forward the end-user IP and cap requests at 10 seconds.
async function getDefaultOptions(ip) {
  const options = { ip };
  options.timeout = 10000;
  return options;
}

// DebridLink signals a rejected API key with the literal string 'badToken'.
export function toCommonError(error) {
  return error === 'badToken' ? BadTokenError : undefined;
}

// Anything below 100% is still transferring into the seedbox.
function statusDownloading(torrent) {
  return torrent.downloadPercent < 100;
}

// Fully present in the seedbox and ready to stream.
function statusReady(torrent) {
  return torrent.downloadPercent === 100;
}

// API error codes that amount to "this account cannot access the content".
function errorExpiredSubscriptionError(error) {
  const accessErrors = ['freeServerOverload', 'maxTorrent', 'maxLink', 'maxLinkHost', 'maxData', 'maxDataHost'];
  return accessErrors.includes(error);
}

240
src/node/addon/moch/moch.js Normal file
View File

@@ -0,0 +1,240 @@
import namedQueue from 'named-queue';
import * as options from './options.js';
import * as realdebrid from './realdebrid.js';
import * as premiumize from './premiumize.js';
import * as alldebrid from './alldebrid.js';
import * as debridlink from './debridlink.js';
import * as offcloud from './offcloud.js';
import * as putio from './putio.js';
import StaticResponse, { isStaticUrl } from './static.js';
import { cacheWrapResolvedUrl } from '../lib/cache.js';
import { timeout } from '../lib/promises.js';
import { BadTokenError, streamFilename, AccessDeniedError, enrichMeta } from './mochHelper.js';
const RESOLVE_TIMEOUT = 2 * 60 * 1000; // 2 minutes
const MIN_API_KEY_SYMBOLS = 15; // shortest plausible debrid API key; anything shorter is rejected outright
const TOKEN_BLACKLIST = []; // in-memory `${mochKey}|${token}` pairs that already failed authentication
// Registry of all supported debrid ("moch") providers, keyed by config key.
// `instance` is the provider module implementing the common moch interface
// (getCachedStreams / getCatalog / getItemMeta / resolve / toCommonError);
// `catalog: true` exposes the provider's cloud content as a Stremio catalog.
export const MochOptions = {
  realdebrid: {
    key: 'realdebrid',
    instance: realdebrid,
    name: "RealDebrid",
    shortName: 'RD',
    catalog: true
  },
  premiumize: {
    key: 'premiumize',
    instance: premiumize,
    name: 'Premiumize',
    shortName: 'PM',
    catalog: true
  },
  alldebrid: {
    key: 'alldebrid',
    instance: alldebrid,
    name: 'AllDebrid',
    shortName: 'AD',
    catalog: true
  },
  debridlink: {
    key: 'debridlink',
    instance: debridlink,
    name: 'DebridLink',
    shortName: 'DL',
    catalog: true
  },
  offcloud: {
    key: 'offcloud',
    instance: offcloud,
    name: 'Offcloud',
    shortName: 'OC',
    catalog: true
  },
  putio: {
    key: 'putio',
    instance: putio,
    name: 'Put.io',
    shortName: 'Putio',
    catalog: true
  }
};
// One named-queue per provider so duplicate concurrent unrestrict requests
// for the same id are coalesced; up to 200 tasks run in parallel each.
const unrestrictQueues = {};
for (const { key } of Object.values(MochOptions)) {
  unrestrictQueues[key] = new namedQueue((task, callback) => {
    task.method()
        .then(result => callback(false, result))
        .catch(error => callback(error));
  }, 200);
}
// Returns the first provider key present in the config (truthy) or undefined.
export function hasMochConfigured(config) {
  const providerKeys = Object.keys(MochOptions);
  return providerKeys.find(providerKey => Boolean(config?.[providerKey]));
}

// Decorate the given streams with cached/download links from every debrid
// provider the user configured. Individual provider failures never reject
// the whole call; they are collected and rendered by processMochResults.
export async function applyMochs(streams, config) {
  if (!streams?.length || !hasMochConfigured(config)) {
    return streams;
  }
  const configuredMochs = Object.keys(config)
      .filter(configKey => MochOptions[configKey])
      .map(configKey => MochOptions[configKey]);
  const results = await Promise.all(configuredMochs.map(moch => {
    if (isInvalidToken(config[moch.key], moch.key)) {
      return { moch, error: BadTokenError };
    }
    return moch.instance.getCachedStreams(streams, config[moch.key])
        .then(mochStreams => ({ moch, mochStreams }))
        .catch(rawError => {
          const error = moch.instance.toCommonError(rawError) || rawError;
          if (error === BadTokenError) {
            blackListToken(config[moch.key], moch.key);
          }
          return { moch, error };
        });
  }));
  return processMochResults(streams, config, results);
}
// Resolve a moch stream url to a direct download link. Identical concurrent
// requests are deduplicated through a per-provider named queue, the resolved
// url is cached, and static failure responses are rewritten into urls served
// by this addon host.
export async function resolve(parameters) {
  const moch = MochOptions[parameters.mochKey];
  if (!moch) {
    return Promise.reject(new Error(`Not a valid moch provider: ${parameters.mochKey}`));
  }
  if (!parameters.apiKey || !parameters.infoHash || !parameters.cachedEntryInfo) {
    return Promise.reject(new Error("No valid parameters passed"));
  }
  // Dedupe key: same ip/provider/key/torrent/file resolves only once at a time.
  const id = `${parameters.ip}_${parameters.mochKey}_${parameters.apiKey}_${parameters.infoHash}_${parameters.fileIndex}`;
  const method = () => timeout(RESOLVE_TIMEOUT, cacheWrapResolvedUrl(id, () => moch.instance.resolve(parameters)))
      .catch(error => {
        // Never surface raw provider errors to the player; serve a static video.
        console.warn(error);
        return StaticResponse.FAILED_UNEXPECTED;
      })
      .then(url => isStaticUrl(url) ? `${parameters.host}/${url}` : url);
  const unrestrictQueue = unrestrictQueues[moch.key];
  // Adapt the callback-style named-queue API to a promise.
  return new Promise(((resolve, reject) => {
    unrestrictQueue.push({ id, method }, (error, result) => result ? resolve(result) : reject(error));
  }));
}
// Fetch the cloud-content catalog for one provider. A recognised auth error
// blacklists the token and degrades to an empty catalog; other errors reject.
export async function getMochCatalog(mochKey, config) {
  const moch = MochOptions[mochKey];
  if (!moch) {
    return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
  }
  if (isInvalidToken(config[mochKey], mochKey)) {
    return Promise.reject(new Error(`Invalid API key for moch provider: ${mochKey}`));
  }
  try {
    return await moch.instance.getCatalog(config[moch.key], config.skip, config.ip);
  } catch (rawError) {
    const commonError = moch.instance.toCommonError(rawError);
    if (commonError === BadTokenError) {
      blackListToken(config[moch.key], moch.key);
    }
    return commonError ? [] : Promise.reject(rawError);
  }
}
// Fetch and enrich the meta for one catalog item, rewriting bare static
// stream paths into resolvable addon urls and grouping all of the item's
// videos for binge watching.
export async function getMochItemMeta(mochKey, itemId, config) {
  const moch = MochOptions[mochKey];
  if (!moch) {
    return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
  }
  const meta = await moch.instance.getItemMeta(itemId, config[moch.key], config.ip)
      .then(itemMeta => enrichMeta(itemMeta));
  for (const video of meta.videos) {
    for (const stream of video.streams) {
      if (!stream.url.startsWith('http')) {
        stream.url = `${config.host}/${moch.key}/${stream.url}/${streamFilename(video)}`;
      }
      stream.behaviorHints = { bingeGroup: itemId };
    }
  }
  return meta;
}
// Turn raw per-provider results into the final stream list: auth errors
// replace the whole response, cached torrents become provider streams, and
// (optionally) uncached torrents gain "download to debrid" entries.
function processMochResults(streams, config, results) {
  const errorResults = results
      .map(result => errorStreamResponse(result.moch.key, result.error, config))
      .filter(Boolean);
  if (errorResults.length) {
    return errorResults;
  }
  const mochResults = results.filter(result => result?.mochStreams);
  let resultStreams = mochResults
      .reduce((accumulated, mochResult) => populateCachedLinks(accumulated, mochResult, config), streams);
  if (!options.excludeDownloadLinks(config)) {
    resultStreams = populateDownloadLinks(resultStreams, mochResults, config);
  }
  // Plain torrent streams carry no url; drop them unless explicitly requested.
  return options.includeTorrentLinks(config) ? resultStreams : resultStreams.filter(stream => stream.url);
}

// Replace cached torrent streams with direct provider streams ([XX+] prefix).
function populateCachedLinks(streams, mochResult, config) {
  const { moch, mochStreams } = mochResult;
  return streams.map(stream => {
    const cachedEntry = stream.infoHash && mochStreams[stream.infoHash];
    if (!cachedEntry?.cached) {
      return stream;
    }
    return {
      name: `[${moch.shortName}+] ${stream.name}`,
      title: stream.title,
      url: `${config.host}/${moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
      behaviorHints: stream.behaviorHints
    };
  });
}
// Append "[XX download] …" entries for healthy torrent streams that are not
// cached by the given provider. Mutates and returns the `streams` array.
function populateDownloadLinks(streams, mochResults, config) {
  const torrentStreams = streams.filter(stream => stream.infoHash);
  const seededStreams = streams.filter(stream => !stream.title.includes('👤 0'));
  torrentStreams.forEach(stream => mochResults.forEach(mochResult => {
    const cachedEntry = mochResult.mochStreams[stream.infoHash];
    // Guard on the entry itself: the previous `!cachedEntry?.cached` check let
    // a missing entry through and crashed below on `cachedEntry.url`.
    if (cachedEntry && !cachedEntry.cached && isHealthyStreamForDebrid(seededStreams, stream)) {
      streams.push({
        name: `[${mochResult.moch.shortName} download] ${stream.name}`,
        title: stream.title,
        url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
        behaviorHints: stream.behaviorHints
      })
    }
  }));
  return streams;
}
// A download-to-debrid link is worth showing unless the stream is seedless,
// not 4k, and there are already plenty of (seeded) alternatives.
function isHealthyStreamForDebrid(streams, stream) {
  if (!stream.title.includes('👤 0')) {
    return true;
  }
  return stream.name.includes('4k') || streams.length <= 5;
}

// A token is invalid when it is too short to be a real API key or has
// already been blacklisted for this provider.
function isInvalidToken(token, mochKey) {
  if (token.length < MIN_API_KEY_SYMBOLS) {
    return true;
  }
  return TOKEN_BLACKLIST.includes(`${mochKey}|${token}`);
}

// Remember the failed provider/token pair so later requests fail fast.
function blackListToken(token, mochKey) {
  const tokenKey = [mochKey, token].join('|');
  console.log(`Blacklisting invalid token: ${tokenKey}`)
  TOKEN_BLACKLIST.push(tokenKey);
}

// Map auth-type failures to a visible dummy stream entry pointing at the
// FAILED_ACCESS static video; other errors are not surfaced to the user.
function errorStreamResponse(mochKey, error, config) {
  if (error !== BadTokenError && error !== AccessDeniedError) {
    return undefined;
  }
  const { shortName, name } = MochOptions[mochKey];
  const title = error === BadTokenError
      ? `Invalid ${name} ApiKey/Token!`
      : `Expired/invalid ${name} subscription!`;
  return {
    name: `Selfhostio\n${shortName} error`,
    title,
    url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
  };
}

View File

@@ -0,0 +1,63 @@
import * as repository from '../lib/repository.js';
const METAHUB_URL = 'https://images.metahub.space' // CDN serving imdb-keyed artwork (logo/poster/background)
export const BadTokenError = { code: 'BAD_TOKEN' } // sentinel: provider rejected the API key/token
export const AccessDeniedError = { code: 'ACCESS_DENIED' } // sentinel: subscription expired or access refused
/**
 * Split `arr` into consecutive chunks of at most `size` elements.
 * An empty input yields a single empty chunk (`[[]]`), matching the previous
 * recursive behaviour. Implemented iteratively: the old recursion re-sliced
 * the whole remainder on every step (quadratic copying) and grew the call
 * stack linearly with the number of chunks.
 * @param {Array} arr - items to split
 * @param {number} size - maximum chunk length (> 0)
 * @returns {Array<Array>} chunks in original order
 */
export function chunkArray(arr, size) {
  if (arr.length <= size) {
    return [arr];
  }
  const chunks = [];
  for (let start = 0; start < arr.length; start += size) {
    chunks.push(arr.slice(start, start + size));
  }
  return chunks;
}
// Stream titles look like "Torrent Name\n[dir/]file.mkv\n👤 seeders …".
// Drop the seeders line, keep the last remaining line, strip any folder
// prefix and url-encode the result.
export function streamFilename(stream) {
  const withoutSeeders = stream.title.replace(/\n👤.*/s, '');
  const lines = withoutSeeders.split('\n');
  const lastLine = lines[lines.length - 1];
  const segments = lastLine.split('/');
  return encodeURIComponent(segments[segments.length - 1]);
}
// Enrich a moch catalog item with imdb artwork and per-episode ids by
// cross-referencing the item's info hashes against the local files table.
// Returns the untouched meta when nothing is known about the hashes.
export async function enrichMeta(itemMeta) {
  // Unique, defined info hashes from the item itself and each of its videos.
  const infoHashes = [...new Set([itemMeta.infoHash]
      .concat(itemMeta.videos.map(video => video.infoHash))
      .filter(infoHash => infoHash))];
  // Repository failures degrade gracefully to "no known files".
  const files = infoHashes.length ? await repository.getFiles(infoHashes).catch(() => []) : [];
  const commonImdbId = itemMeta.infoHash && mostCommonValue(files.map(file => file.imdbId));
  if (files.length) {
    return {
      ...itemMeta,
      logo: commonImdbId && `${METAHUB_URL}/logo/medium/${commonImdbId}/img`,
      poster: commonImdbId && `${METAHUB_URL}/poster/medium/${commonImdbId}/img`,
      background: commonImdbId && `${METAHUB_URL}/background/medium/${commonImdbId}/img`,
      videos: itemMeta.videos.map(video => {
        // Match a stored file by (wildcard-tolerant) filename to get imdb data.
        const file = files.find(file => sameFilename(video.title, file.title));
        if (file?.imdbId) {
          if (file.imdbSeason && file.imdbEpisode) {
            // Series episode: id becomes imdbId:season:episode with a thumbnail.
            video.id = `${file.imdbId}:${file.imdbSeason}:${file.imdbEpisode}`;
            video.season = file.imdbSeason;
            video.episode = file.imdbEpisode;
            video.thumbnail = `https://episodes.metahub.space/${file.imdbId}/${video.season}/${video.episode}/w780.jpg`
          } else {
            // Movie: plain imdb id with a background-style thumbnail.
            video.id = file.imdbId;
            video.thumbnail = `${METAHUB_URL}/background/small/${file.imdbId}/img`;
          }
        }
        return video;
      })
    }
  }
  return itemMeta
}
/**
 * Suffix-compare two filenames: `expectedFilename` is matched against the
 * tail of `filename`, so an added folder/release prefix does not break
 * equality. The Unicode replacement character (U+FFFD) in the expected name
 * acts as a single-character wildcard, tolerating titles that lost bytes to
 * encoding errors. The wildcard literal had been mangled to the mojibake
 * sequence '<27>' in transit, which made the wildcard never match; it is
 * restored to the actual replacement character here.
 */
export function sameFilename(filename, expectedFilename) {
  const offset = filename.length - expectedFilename.length;
  for (let i = 0; i < expectedFilename.length; i++) {
    if (filename[offset + i] !== expectedFilename[i] && expectedFilename[i] !== '\uFFFD') {
      return false;
    }
  }
  return true;
}
/**
 * Most frequent value in `array` (undefined for an empty array).
 * Single-pass frequency count: the previous sort-based version both mutated
 * the caller's array and rescanned it twice per comparison (O(n²) work).
 * Ties resolve to one of the most common values, as before.
 */
function mostCommonValue(array) {
  const counts = new Map();
  for (const value of array) {
    counts.set(value, (counts.get(value) ?? 0) + 1);
  }
  let best;
  let bestCount = 0;
  for (const [value, count] of counts) {
    // `>=` keeps the later-inserted value on ties.
    if (count >= bestCount) {
      best = value;
      bestCount = count;
    }
  }
  return best;
}

View File

@@ -0,0 +1,186 @@
import OffcloudClient from 'offcloud-api';
import magnet from 'magnet-uri';
import { Type } from '../lib/types.js';
import { isVideo } from '../lib/extension.js';
import StaticResponse from './static.js';
import { getMagnetLink } from '../lib/magnetHelper.js';
import { chunkArray, BadTokenError, sameFilename } from './mochHelper.js';
const KEY = 'offcloud';
// Map every stream's infoHash to a pre-built resolve url plus a cached flag
// obtained from Offcloud's instant-availability endpoint (queried in batches
// of 100 hashes). Returns undefined when availability could not be checked.
export async function getCachedStreams(streams, apiKey) {
  const options = await getDefaultOptions();
  const OC = new OffcloudClient(apiKey, options);
  const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 100);
  let available;
  try {
    const results = await Promise.all(hashBatches.map(hashes => OC.instant.cache(hashes)));
    available = results.flatMap(result => result.cachedItems);
  } catch (error) {
    // Auth-type errors must propagate; anything else is best-effort.
    if (toCommonError(error)) {
      throw error;
    }
    console.warn('Failed Offcloud cached torrent availability request:', error);
    available = undefined;
  }
  return available && streams.reduce((mochStreams, stream) => {
    // Titles: "name\n[filename]\n👤 …" — a 2-line title denotes a single file.
    const titleLines = stream.title.replace(/\n👤.*/s, '').split('\n');
    const fileName = titleLines[titleLines.length - 1];
    const fileIndex = titleLines.length === 2 ? stream.fileIdx : null;
    mochStreams[stream.infoHash] = {
      url: `${apiKey}/${stream.infoHash}/${encodeURIComponent(fileName)}/${fileIndex}`,
      cached: available.includes(stream.infoHash)
    };
    return mochStreams;
  }, {});
}
// List the user's Offcloud cloud history as catalog entries.
// The history is returned in full, so only the first page has content.
// (Removed a no-op `.then(torrents => torrents)` link from the chain.)
export async function getCatalog(apiKey, offset = 0) {
  if (offset > 0) {
    return [];
  }
  const options = await getDefaultOptions();
  const OC = new OffcloudClient(apiKey, options);
  return OC.cloud.history()
      .then(torrents => (torrents || [])
          .map(torrent => ({
            id: `${KEY}:${torrent.requestId}`,
            type: Type.OTHER,
            name: torrent.fileName
          })));
}
// Build a Stremio meta object for one Offcloud history entry.
export async function getItemMeta(itemId, apiKey, ip) {
  const options = await getDefaultOptions(ip);
  const OC = new OffcloudClient(apiKey, options);
  // Offcloud has no direct lookup by request id, so scan the full history.
  const torrents = await OC.cloud.history();
  const torrent = torrents.find(torrent => torrent.requestId === itemId)
  // NOTE(review): if no history entry matches itemId, `torrent` is undefined
  // and `torrent.name` / `_getFileUrls` below throw — confirm callers only
  // pass ids obtained from getCatalog.
  const infoHash = torrent && magnet.decode(torrent.originalLink).infoHash
  const createDate = torrent ? new Date(torrent.createdOn) : new Date();
  return _getFileUrls(OC, torrent)
      .then(files => ({
        id: `${KEY}:${itemId}`,
        type: Type.OTHER,
        name: torrent.name,
        infoHash: infoHash,
        videos: files
            .filter(file => isVideo(file))
            .map((file, index) => ({
              id: `${KEY}:${itemId}:${index}`,
              title: file.split('/').pop(),
              // Subtract the index so insertion order survives date sorting.
              released: new Date(createDate.getTime() - index).toISOString(),
              streams: [{ url: file }]
            }))
      }))
}
/**
 * Resolve an Offcloud torrent/file pair to a direct download url.
 * Access-type failures resolve to the static FAILED_ACCESS response;
 * anything else rejects with a descriptive message.
 */
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
  console.log(`Unrestricting Offcloud ${infoHash} [${fileIndex}]`);
  const options = await getDefaultOptions(ip);
  const OC = new OffcloudClient(apiKey, options);
  try {
    return await _resolve(OC, infoHash, cachedEntryInfo, fileIndex);
  } catch (error) {
    if (errorExpiredSubscriptionError(error)) {
      console.log(`Access denied to Offcloud ${infoHash} [${fileIndex}]`);
      return StaticResponse.FAILED_ACCESS;
    }
    return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(error)}`);
  }
}
// Resolve an infoHash to a direct url through Offcloud's cloud transfers.
async function _resolve(OC, infoHash, cachedEntryInfo, fileIndex) {
  // For existing requests fetch the live status; the `info.status || info`
  // step normalises both `{status: {...}}` wrappers and plain payloads.
  const torrent = await _createOrFindTorrent(OC, infoHash)
      .then(info => info.requestId ? OC.cloud.status(info.requestId) : Promise.resolve(info))
      .then(info => info.status || info);
  if (torrent && statusReady(torrent)) {
    return _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex);
  } else if (torrent && statusDownloading(torrent)) {
    console.log(`Downloading to Offcloud ${infoHash} [${fileIndex}]...`);
    return StaticResponse.DOWNLOADING;
  } else if (torrent && statusError(torrent)) {
    // Failed/canceled transfers are re-added once before giving up.
    console.log(`Retry failed download in Offcloud ${infoHash} [${fileIndex}]...`);
    return _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex);
  }
  return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(torrent)}`);
}
// Reuse an existing history entry when possible; otherwise start a download.
async function _createOrFindTorrent(OC, infoHash) {
  try {
    return await _findTorrent(OC, infoHash);
  } catch {
    return _createTorrent(OC, infoHash);
  }
}

// History entries keep the original magnet link, so match on the hash,
// preferring an entry that has not failed.
async function _findTorrent(OC, infoHash) {
  const torrents = await OC.cloud.history();
  const matches = torrents.filter(torrent => torrent.originalLink.toLowerCase().includes(infoHash));
  const healthy = matches.find(torrent => !statusError(torrent));
  return healthy || matches[0] || Promise.reject('No recent torrent found');
}

// Offcloud accepts magnet links only, so resolve the bare hash first.
async function _createTorrent(OC, infoHash) {
  const magnetLink = await getMagnetLink(infoHash);
  return OC.cloud.download(magnetLink);
}
// Re-add the torrent after a failed download and unrestrict it if it is
// already complete. BUG FIX: `statusReady` takes the torrent object (it reads
// `.status` itself) but was being passed `newTorrent.status`, so the check was
// always false and every retry was reported as a failed download.
async function _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex) {
  const newTorrent = await _createTorrent(OC, infoHash);
  return newTorrent && statusReady(newTorrent)
    ? _unrestrictLink(OC, infoHash, newTorrent, cachedEntryInfo, fileIndex)
    : StaticResponse.FAILED_DOWNLOAD;
}
// Pick the best file url out of the transfer: the exact requested filename,
// else any video file, else whatever file comes last.
async function _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex) {
  const targetFileName = decodeURIComponent(cachedEntryInfo);
  const files = await _getFileUrls(OC, torrent);
  const byName = files.find(file => sameFilename(targetFileName, file.split('/').pop()));
  const anyVideo = files.find(file => isVideo(file));
  const targetFile = byName || anyVideo || files.pop();
  if (!targetFile) {
    return Promise.reject(`No Offcloud links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
  }
  console.log(`Unrestricted Offcloud ${infoHash} [${fileIndex}] to ${targetFile}`);
  return targetFile;
}

// File listing for a transfer. Single-file transfers cannot be explored
// ('Bad archive'), so their direct download url is constructed instead.
async function _getFileUrls(OC, torrent) {
  try {
    return await OC.cloud.explore(torrent.requestId);
  } catch (error) {
    if (error === 'Bad archive') {
      return [`https://${torrent.server}.offcloud.com/cloud/download/${torrent.requestId}/${torrent.fileName}`];
    }
    throw error;
  }
}
// Client options: forward the end-user IP and cap requests at 10 seconds.
async function getDefaultOptions(ip) {
  const options = { ip };
  options.timeout = 10000;
  return options;
}

// 'NOAUTH' is the auth failure code; the 'Cannot read property' variant is
// the client library tripping over an auth-failed payload.
export function toCommonError(error) {
  const isBadToken = error?.error === 'NOAUTH' || error?.message?.startsWith('Cannot read property');
  return isBadToken ? BadTokenError : undefined;
}
// Transfer states while Offcloud is still fetching the content.
function statusDownloading(torrent) {
  return torrent.status === 'downloading' || torrent.status === 'created';
}

// Terminal failure states.
function statusError(torrent) {
  return torrent.status === 'error' || torrent.status === 'canceled';
}

// Content fully present in the Offcloud cloud.
function statusReady(torrent) {
  return torrent.status === 'downloaded';
}

// Offcloud reports access problems as strings containing these markers.
function errorExpiredSubscriptionError(error) {
  if (!error?.includes) {
    return false;
  }
  return error.includes('not_available') || error.includes('NOAUTH') || error.includes('premium membership');
}

View File

@@ -0,0 +1,29 @@
// User-facing debrid configuration toggles. The `key` strings are what gets
// persisted in the addon configuration url.
export const DebridOptions = {
  key: 'debridoptions',
  options: {
    noDownloadLinks: {
      key: 'nodownloadlinks',
      description: 'Don\'t show download to debrid links'
    },
    noCatalog: {
      key: 'nocatalog',
      description: 'Don\'t show debrid catalog'
    },
    torrentLinks: {
      key: 'torrentlinks',
      description: 'Show P2P torrent links for uncached'
    }
  }
}

/** Selected debrid option keys from the config, or undefined when unset. */
function selectedOptions(config) {
  return config[DebridOptions.key];
}

/** True when the user opted out of "download to debrid" links. */
export function excludeDownloadLinks(config) {
  return selectedOptions(config)?.includes(DebridOptions.options.noDownloadLinks.key);
}

/** True when uncached P2P torrent links should be kept in results. */
export function includeTorrentLinks(config) {
  return selectedOptions(config)?.includes(DebridOptions.options.torrentLinks.key);
}

/** True unless the user disabled the debrid cloud catalog. */
export function showDebridCatalog(config) {
  return !selectedOptions(config)?.includes(DebridOptions.options.noCatalog.key);
}

View File

@@ -0,0 +1,195 @@
import PremiumizeClient from 'premiumize-api';
import magnet from 'magnet-uri';
import { Type } from '../lib/types.js';
import { isVideo, isArchive } from '../lib/extension.js';
import StaticResponse from './static.js';
import { getMagnetLink } from '../lib/magnetHelper.js';
import { BadTokenError, chunkArray, sameFilename } from './mochHelper.js';
const KEY = 'premiumize';
// Premiumize caps availability checks, so probe in chunks of 100 streams
// and merge the per-chunk maps into a single object keyed by infoHash.
export async function getCachedStreams(streams, apiKey) {
  const options = await getDefaultOptions();
  const PM = new PremiumizeClient(apiKey, options);
  const chunks = chunkArray(streams, 100);
  const results = await Promise.all(chunks.map(chunk => _getCachedStreams(PM, apiKey, chunk)));
  return results.reduce((all, result) => Object.assign(all, result), {});
}
// Check cached availability for one chunk of streams and map each stream's
// infoHash to a pre-built resolve url plus its cached flag.
async function _getCachedStreams(PM, apiKey, streams) {
  const hashes = streams.map(stream => stream.infoHash);
  return PM.cache.check(hashes)
      .catch(error => {
        // Auth-type errors must propagate; anything else is best-effort and
        // simply marks everything as uncached.
        if (toCommonError(error)) {
          return Promise.reject(error);
        }
        console.warn('Failed Premiumize cached torrent availability request:', error);
        return undefined;
      })
      .then(available => streams
          .reduce((mochStreams, stream, index) => {
            // Titles: "name\n[filename]\n👤 …" — a 2-line title denotes a
            // concrete file, so its index can be forwarded.
            const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
            const fileName = streamTitleParts[streamTitleParts.length - 1];
            const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
            const encodedFileName = encodeURIComponent(fileName);
            mochStreams[stream.infoHash] = {
              url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
              // assumes response[] preserves the order of the queried hashes — TODO confirm
              cached: available?.response[index]
            };
            return mochStreams;
          }, {}));
}
// List the user's Premiumize cloud root folders as catalog entries.
// The whole listing comes back at once, so only the first page has content.
export async function getCatalog(apiKey, offset = 0) {
  if (offset > 0) {
    return [];
  }
  const options = await getDefaultOptions();
  const PM = new PremiumizeClient(apiKey, options);
  const response = await PM.folder.list();
  const torrents = response.content || [];
  return torrents
      .filter(torrent => torrent && torrent.type === 'folder')
      .map(torrent => ({
        id: `${KEY}:${torrent.id}`,
        type: Type.OTHER,
        name: torrent.name
      }));
}
// Build a Stremio meta object for one Premiumize cloud folder item.
export async function getItemMeta(itemId, apiKey, ip) {
  const options = await getDefaultOptions();
  const PM = new PremiumizeClient(apiKey, options);
  const rootFolder = await PM.folder.list(itemId, null);
  const infoHash = await _findInfoHash(PM, itemId);
  return getFolderContents(PM, itemId, ip)
      .then(contents => ({
        id: `${KEY}:${itemId}`,
        type: Type.OTHER,
        name: rootFolder.name,
        infoHash: infoHash,
        videos: contents
            .map((file, index) => ({
              id: `${KEY}:${file.id}:${index}`,
              title: file.name,
              // Subtract the index so insertion order survives date sorting.
              released: new Date(file.created_at * 1000 - index).toISOString(),
              // Prefer the direct link; fall back to the transcode stream link.
              streams: [{ url: file.link || file.stream_link }]
            }))
      }))
}
// Recursively walk a Premiumize folder, returning every video file with its
// name prefixed by the (sub)folder path relative to the starting folder.
async function getFolderContents(PM, itemId, ip, folderPrefix = '') {
  return PM.folder.list(itemId, null, ip)
      .then(response => response.content)
      .then(contents => Promise.all(contents
          .filter(content => content.type === 'folder')
          .map(content => getFolderContents(PM, content.id, ip, [folderPrefix, content.name].join('/'))))
          .then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
          .then(otherContents => contents
              .filter(content => content.type === 'file' && isVideo(content.name))
              .map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
              .concat(otherContents)));
}
/**
 * Resolve a Premiumize torrent/file pair to a direct download url.
 * Cached links resolve instantly; otherwise the transfer flow is used.
 * Non-premium accounts resolve to the static FAILED_ACCESS response.
 */
export async function resolve({ ip, isBrowser, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
  console.log(`Unrestricting Premiumize ${infoHash} [${fileIndex}] for IP ${ip} from browser=${isBrowser}`);
  const options = await getDefaultOptions();
  const PM = new PremiumizeClient(apiKey, options);
  try {
    return await _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser)
        .catch(() => _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser));
  } catch (error) {
    if (error?.message?.includes('Account not premium.')) {
      console.log(`Access denied to Premiumize ${infoHash} [${fileIndex}]`);
      return StaticResponse.FAILED_ACCESS;
    }
    return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(error)}`);
  }
}
// Locate (or add) the transfer, then act on its state: ready transfers are
// unrestricted, failed ones re-created once, active ones report progress.
async function _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser) {
  const torrent = await _createOrFindTorrent(PM, infoHash);
  if (torrent) {
    if (statusReady(torrent.status)) {
      return _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser);
    }
    if (statusDownloading(torrent.status)) {
      console.log(`Downloading to Premiumize ${infoHash} [${fileIndex}]...`);
      return StaticResponse.DOWNLOADING;
    }
    if (statusError(torrent.status)) {
      console.log(`Retrying downloading to Premiumize ${infoHash} [${fileIndex}]...`);
      return _retryCreateTorrent(PM, infoHash, cachedEntryInfo, fileIndex);
    }
  }
  return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(torrent)}`);
}
// Fetch a direct link via Premiumize's instant directDownload endpoint.
// Rejects when the torrent is not cached or the requested file is missing.
async function _getCachedLink(PM, infoHash, encodedFileName, fileIndex, ip, isBrowser) {
  const cachedTorrent = await PM.transfer.directDownload(magnet.encode({ infoHash }), ip);
  if (!cachedTorrent?.content?.length) {
    return Promise.reject('No cached entry found');
  }
  const targetFileName = decodeURIComponent(encodedFileName);
  const videos = cachedTorrent.content.filter(file => isVideo(file.path));
  // Explicit index → match by filename; otherwise take the largest video.
  const targetVideo = Number.isInteger(fileIndex)
      ? videos.find(video => sameFilename(video.path, targetFileName))
      : videos.sort((a, b) => b.size - a.size)[0];
  if (!targetVideo) {
    if (videos.every(video => isArchive(video.path))) {
      console.log(`Only Premiumize archive is available for [${infoHash}] ${fileIndex}`)
      return StaticResponse.FAILED_RAR;
    }
    // BUG FIX: an unmatched filename previously fell through and crashed on
    // `targetVideo.transcode_status`; reject so the caller can use the
    // transfer flow instead.
    return Promise.reject(`No matching cached file found for [${infoHash}] ${fileIndex}`);
  }
  // Browsers prefer the transcoded stream link when transcoding finished.
  const streamLink = isBrowser && targetVideo.transcode_status === 'finished' && targetVideo.stream_link;
  const unrestrictedLink = streamLink || targetVideo.link;
  console.log(`Unrestricted Premiumize ${infoHash} [${fileIndex}] to ${unrestrictedLink}`);
  return unrestrictedLink;
}
// Prefer an existing transfer; fall back to creating a new one.
async function _createOrFindTorrent(PM, infoHash) {
  try {
    return await _findTorrent(PM, infoHash);
  } catch {
    return _createTorrent(PM, infoHash);
  }
}

// Match transfers by hash in their source magnet link, preferring one that
// has not failed.
async function _findTorrent(PM, infoHash) {
  const transfers = await PM.transfer.list().then(response => response.transfers);
  const matches = transfers.filter(torrent => torrent.src.toLowerCase().includes(infoHash));
  // NOTE(review): statusError is fed `statusCode` here but `status` elsewhere
  // in this module — confirm which field carries the textual state.
  const healthy = matches.find(torrent => !statusError(torrent.statusCode));
  return healthy || matches[0] || Promise.reject('No recent torrent found');
}

// Catalog item ids reference a file/folder id; map it back to its transfer
// to recover the original info hash.
async function _findInfoHash(PM, itemId) {
  const transfers = await PM.transfer.list().then(response => response.transfers);
  const transfer = transfers.find(torrent => `${torrent.file_id}` === itemId || `${torrent.folder_id}` === itemId);
  return transfer?.src ? magnet.decode(transfer.src).infoHash : undefined;
}

// The create response lacks the transfer object, so list it right after.
async function _createTorrent(PM, infoHash) {
  const magnetLink = await getMagnetLink(infoHash);
  await PM.transfer.create(magnetLink);
  return _findTorrent(PM, infoHash);
}
// Re-add the torrent after a failure and unrestrict it if already finished.
// `_createTorrent` already resolves via `_findTorrent`, so the extra
// `.then(() => _findTorrent(...))` the old code chained here was a redundant
// second API round-trip and has been removed.
async function _retryCreateTorrent(PM, infoHash, encodedFileName, fileIndex) {
  const newTorrent = await _createTorrent(PM, infoHash);
  return newTorrent && statusReady(newTorrent.status)
    ? _getCachedLink(PM, infoHash, encodedFileName, fileIndex)
    : StaticResponse.FAILED_DOWNLOAD;
}
// Premiumize reports bad credentials with this exact message.
export function toCommonError(error) {
  return error?.message === 'Not logged in.' ? BadTokenError : undefined;
}

// Transfer states that will never finish on their own.
function statusError(status) {
  return status === 'deleted' || status === 'error' || status === 'timeout';
}

// Transfer states still making progress.
function statusDownloading(status) {
  return status === 'waiting' || status === 'queued' || status === 'running';
}

// Transfer states whose content is fully available.
function statusReady(status) {
  return status === 'finished' || status === 'seeding';
}

// `ip` is accepted for interface parity with the other mochs; the
// Premiumize client only needs a request timeout here.
async function getDefaultOptions(ip) {
  return { timeout: 5000 };
}

View File

@@ -0,0 +1,219 @@
import PutioClient from '@putdotio/api-client'
import { isVideo } from '../lib/extension.js';
import { delay } from '../lib/promises.js';
import StaticResponse from './static.js';
import { getMagnetLink } from '../lib/magnetHelper.js';
import { Type } from "../lib/types.js";
import { decode } from "magnet-uri";
import { sameFilename } from "./mochHelper.js";
const PutioAPI = PutioClient.default;
const KEY = 'putio';
// Put.io has no instant-availability endpoint, so every stream is marked
// uncached and resolution always goes through the transfer flow. The url
// still pre-encodes apiKey/hash/filename/index for the resolve route.
export async function getCachedStreams(streams, apiKey) {
  const mochStreams = {};
  for (const stream of streams) {
    // Titles: "name\n[filename]\n👤 …" — a 2-line title denotes a single file.
    const titleLines = stream.title.replace(/\n👤.*/s, '').split('\n');
    const fileName = titleLines[titleLines.length - 1];
    const fileIndex = titleLines.length === 2 ? stream.fileIdx : null;
    mochStreams[stream.infoHash] = {
      url: `${apiKey}/${stream.infoHash}/${encodeURIComponent(fileName)}/${fileIndex}`,
      cached: false
    };
  }
  return mochStreams;
}
// List the user's put.io root files (folder 0) as catalog entries.
// The root listing is returned whole; no pagination support.
export async function getCatalog(apiKey, offset = 0) {
  if (offset > 0) {
    return [];
  }
  const Putio = createPutioAPI(apiKey)
  const response = await Putio.Files.Query(0);
  const files = response?.body?.files || [];
  return files.map(file => ({
    id: `${KEY}:${file.id}`,
    type: Type.OTHER,
    name: file.name
  }));
}
// Build a Stremio meta object for a catalog item from the put.io file tree.
export async function getItemMeta(itemId, apiKey) {
  const Putio = createPutioAPI(apiKey)
  const infoHash = await _findInfoHash(Putio, itemId)
  return getFolderContents(Putio, itemId)
      .then(contents => ({
        id: `${KEY}:${itemId}`,
        type: Type.OTHER,
        // NOTE(review): `contents` is the array returned by getFolderContents,
        // which has no `name` property, so this resolves to undefined —
        // confirm whether the root folder's name was intended here.
        name: contents.name,
        infoHash: infoHash,
        videos: contents
            .map((file, index) => ({
              id: `${KEY}:${file.id}:${index}`,
              title: file.name,
              released: new Date(file.created_at).toISOString(),
              // 'null' hash marker + file id routes resolve() straight to the file.
              streams: [{ url: `${apiKey}/null/null/${file.id}` }]
            }))
      }))
}
// Recursively walk a put.io folder, returning every VIDEO file with its name
// prefixed by the (sub)folder path. Querying a single file id yields only
// `parent`, hence the `[body?.parent]` fallback.
async function getFolderContents(Putio, itemId, folderPrefix = '') {
  return await Putio.Files.Query(itemId)
      .then(response => response?.body)
      .then(body => body?.files?.length ? body.files : [body?.parent].filter(x => x))
      .then(contents => Promise.all(contents
          .filter(content => content.file_type === 'FOLDER')
          .map(content => getFolderContents(Putio, content.id, [folderPrefix, content.name].join('/'))))
          .then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
          .then(otherContents => contents
              .filter(content => content.file_type === 'VIDEO')
              .map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
              .concat(otherContents)));
}
/**
 * Resolve a put.io torrent/file pair to a direct download url.
 * A 401 from the API resolves to the static FAILED_ACCESS response.
 */
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
  console.log(`Unrestricting Putio ${infoHash} [${fileIndex}]`);
  const Putio = createPutioAPI(apiKey)
  try {
    return await _resolve(Putio, infoHash, cachedEntryInfo, fileIndex);
  } catch (error) {
    if (error?.data?.status_code === 401) {
      console.log(`Access denied to Putio ${infoHash} [${fileIndex}]`);
      return StaticResponse.FAILED_ACCESS;
    }
    return Promise.reject(`Failed Putio adding torrent ${JSON.stringify(error.data || error)}`);
  }
}
// Resolve an infoHash (or a direct file id) through put.io transfers.
async function _resolve(Putio, infoHash, cachedEntryInfo, fileIndex) {
  // Catalog meta links encode the file id directly with a 'null' hash marker.
  if (infoHash === 'null') {
    return _unrestrictVideo(Putio, fileIndex);
  }
  const torrent = await _createOrFindTorrent(Putio, infoHash);
  if (torrent) {
    if (statusReady(torrent.status)) {
      return _unrestrictLink(Putio, torrent, cachedEntryInfo, fileIndex);
    }
    if (statusDownloading(torrent.status)) {
      console.log(`Downloading to Putio ${infoHash} [${fileIndex}]...`);
      return StaticResponse.DOWNLOADING;
    }
    if (statusError(torrent.status)) {
      console.log(`Retrying downloading to Putio ${infoHash} [${fileIndex}]...`);
      return _retryCreateTorrent(Putio, infoHash, cachedEntryInfo, fileIndex);
    }
  }
  return Promise.reject("Failed Putio adding torrent");
}
// Reuse an existing transfer when present; otherwise add a fresh one.
async function _createOrFindTorrent(Putio, infoHash) {
  try {
    return await _findTorrent(Putio, infoHash);
  } catch {
    return _createTorrent(Putio, infoHash);
  }
}

// Re-add after a failure and unrestrict right away if already complete.
async function _retryCreateTorrent(Putio, infoHash, encodedFileName, fileIndex) {
  const newTorrent = await _createTorrent(Putio, infoHash);
  if (newTorrent && statusReady(newTorrent.status)) {
    return _unrestrictLink(Putio, newTorrent, encodedFileName, fileIndex);
  }
  return StaticResponse.FAILED_DOWNLOAD;
}
// Find an existing transfer for this hash, preferring one that has not failed.
async function _findTorrent(Putio, infoHash) {
  const torrents = await Putio.Transfers.Query().then(response => response.data.transfers);
  const foundTorrents = torrents.filter(torrent => torrent.source.toLowerCase().includes(infoHash));
  const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.status));
  const foundTorrent = nonFailedTorrent || foundTorrents[0];
  // BUG FIX: the old check tested the ARRAY (`foundTorrents`) — always truthy
  // and never carrying `userfile_exists` — so every lookup cancelled a
  // transfer with an undefined id and rejected. Check the selected torrent
  // instead: a completed transfer whose cloud file was deleted is unusable,
  // so cancel it and reject to trigger re-creation. (Presumably
  // `userfile_exists` is only meaningful for completed transfers — confirm
  // against the put.io API docs.)
  if (foundTorrent && statusReady(foundTorrent.status) && !foundTorrent.userfile_exists) {
    return Putio.Transfers.Cancel(foundTorrent.id).then(() => Promise.reject())
  }
  return foundTorrent || Promise.reject('No recent torrent found in Putio');
}
// Map a catalog file id back to its transfer to recover the info hash.
async function _findInfoHash(Putio, fileId) {
  const transfers = await Putio.Transfers.Query().then(response => response?.data?.transfers);
  const transfer = transfers.find(torrent => `${torrent.file_id}` === fileId);
  return transfer?.source ? decode(transfer.source).infoHash : undefined;
}
// Add a magnet transfer and poll (see _getNewTorrent) until put.io has
// processed it, so callers receive a transfer with a meaningful status.
async function _createTorrent(Putio, infoHash) {
  const magnetLink = await getMagnetLink(infoHash);
  return Putio.Transfers.Add({ url: magnetLink })
      .then(response => _getNewTorrent(Putio, response.data.transfer.id));
}
// Poll a freshly added transfer every `pollRate` ms until it leaves its
// transient processing states, giving up after `maxPollNumber` attempts and
// returning the transfer in whatever state it is in.
async function _getNewTorrent(Putio, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
  const torrent = await Putio.Transfers.Get(torrentId).then(response => response.data.transfer);
  if (statusProcessing(torrent.status) && pollCounter < maxPollNumber) {
    await delay(pollRate);
    return _getNewTorrent(Putio, torrentId, pollCounter + 1);
  }
  return torrent;
}
// Locate the wanted file inside the transfer, then fetch its storage url.
async function _unrestrictLink(Putio, torrent, encodedFileName, fileIndex) {
  const targetVideo = await _getTargetFile(Putio, torrent, encodedFileName, fileIndex);
  return _unrestrictVideo(Putio, targetVideo.id);
}

// Exchange a file id for its direct storage download url.
async function _unrestrictVideo(Putio, videoId) {
  const { data } = await Putio.File.GetStorageURL(videoId);
  console.log(`Unrestricted Putio [${videoId}] to ${data.url}`);
  return data.url;
}
/**
 * Walks a Putio transfer's folder tree to locate the file to stream.
 * When `fileIndex` is an integer the file is matched by (decoded) filename;
 * otherwise the largest video found in the first leaf level wins.
 * @returns {Promise<object>} the matching file, or a rejection when none is found
 */
async function _getTargetFile(Putio, torrent, encodedFileName, fileIndex) {
  const targetFileName = decodeURIComponent(encodedFileName);
  let targetFile;
  let files = await _getFiles(Putio, torrent.file_id);
  let videos = [];
  // Breadth-first descent: collect videos at each level; recurse into folders
  // only while nothing has matched yet.
  while (!targetFile && files.length) {
    const folders = files.filter(file => file.file_type === 'FOLDER');
    videos = videos.concat(files.filter(file => isVideo(file.name)));
    // when specific file index is defined search by filename
    // when it's not defined find all videos and take the largest one
    targetFile = Number.isInteger(fileIndex)
      ? videos.find(video => sameFilename(targetFileName, video.name))
      : !folders.length && videos.sort((a, b) => b.size - a.size)[0];
    files = !targetFile
      ? await Promise.all(folders.map(folder => _getFiles(Putio, folder.id)))
          .then(results => results.reduce((a, b) => a.concat(b), []))
      : [];
  }
  // NOTE(review): transfers elsewhere in this module expose `source`/`file_id`;
  // confirm `torrent.hash` actually exists for this rejection message.
  return targetFile || Promise.reject(`No target file found for Putio [${torrent.hash}] ${targetFileName}`);
}
/**
 * Lists the children of a Putio folder; when the query returns an empty list,
 * falls back to the parent entry itself (single-file transfers).
 */
async function _getFiles(Putio, fileId) {
  let response;
  try {
    response = await Putio.Files.Query(fileId);
  } catch (error) {
    return Promise.reject({ ...error.data, path: error.request.path });
  }
  const { files, parent } = response.data;
  return files.length ? files : [parent];
}
/**
 * Builds an authenticated put.io client from a `clientId@token` api key.
 */
function createPutioAPI(apiKey) {
  const clientID = apiKey.replace(/@.*/, '');
  const token = apiKey.replace(/.*@/, '');
  const client = new PutioAPI({ clientID });
  client.setToken(token);
  return client;
}
/**
 * Putio errors are never translated to a shared moch error type.
 * @returns {undefined} always
 */
export function toCommonError(error) {
  return undefined;
}

// put.io transfer status predicates.

function statusError(status) {
  return status === 'ERROR';
}

function statusDownloading(status) {
  return status === 'WAITING' || status === 'IN_QUEUE' || status === 'DOWNLOADING';
}

function statusProcessing(status) {
  return status === 'WAITING' || status === 'IN_QUEUE' || status === 'COMPLETING';
}

function statusReady(status) {
  return status === 'COMPLETED' || status === 'SEEDING';
}

View File

@@ -0,0 +1,399 @@
import RealDebridClient from 'real-debrid-api';
import { Type } from '../lib/types.js';
import { isVideo, isArchive } from '../lib/extension.js';
import { delay } from '../lib/promises.js';
import { cacheAvailabilityResults, getCachedAvailabilityResults } from '../lib/cache.js';
import StaticResponse from './static.js';
import { getMagnetLink } from '../lib/magnetHelper.js';
import { chunkArray, BadTokenError, AccessDeniedError } from './mochHelper.js';
const MIN_SIZE = 5 * 1024 * 1024; // 5 MB
const CATALOG_MAX_PAGE = 1;
const CATALOG_PAGE_SIZE = 100;
const NON_BLACKLIST_ERRORS = ['ESOCKETTIMEDOUT', 'EAI_AGAIN', '504 Gateway Time-out'];
const KEY = 'realdebrid';
const DEBRID_DOWNLOADS = 'Downloads';
/**
 * Maps each stream's info hash to a moch resolve url plus a cached flag based
 * on RealDebrid instant availability.
 * @returns {Promise<object|undefined>} map keyed by infoHash, or the falsy
 *   availability result when the lookup failed
 */
export async function getCachedStreams(streams, apiKey) {
  const hashes = streams.map(stream => stream.infoHash);
  const available = await _getInstantAvailable(hashes, apiKey);
  if (!available) {
    return available;
  }
  const mochStreams = {};
  for (const stream of streams) {
    const cachedIds = _getCachedFileIds(stream.fileIdx, available[stream.infoHash]);
    mochStreams[stream.infoHash] = {
      url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
      cached: !!cachedIds.length
    };
  }
  return mochStreams;
}
/**
 * Resolves instant-availability info for the given hashes, merging previously
 * cached results with fresh RD lookups for the missing ones.
 * @returns {Promise<object|undefined>} map of infoHash -> cached file-id
 *   groups, or undefined when the request ultimately fails non-retryably
 */
async function _getInstantAvailable(hashes, apiKey, retries = 3, maxChunkSize = 150) {
  const cachedResults = await getCachedAvailabilityResults(hashes);
  const missingHashes = hashes.filter(infoHash => !cachedResults[infoHash]);
  if (!missingHashes.length) {
    return cachedResults
  }
  const options = await getDefaultOptions();
  const RD = new RealDebridClient(apiKey, options);
  // Query RD in parallel batches to keep individual responses small.
  const hashBatches = chunkArray(missingHashes, maxChunkSize)
  return Promise.all(hashBatches.map(batch => RD.torrents.instantAvailability(batch)
      .then(response => {
        if (typeof response !== 'object') {
          return Promise.reject(new Error('RD returned non JSON response: ' + response));
        }
        return processAvailabilityResults(response);
      })))
      .then(results => results.reduce((all, result) => Object.assign(all, result), {}))
      .then(results => cacheAvailabilityResults(results))
      .then(results => Object.assign(cachedResults, results))
      .catch(error => {
        if (toCommonError(error)) {
          // Token/access problems propagate to the caller.
          return Promise.reject(error);
        }
        if (!error && maxChunkSize !== 1) {
          // sometimes due to large response size RD responds with an empty body. Reduce chunk size to reduce body
          console.log(`Reducing chunk size for availability request: ${hashes[0]}`);
          return _getInstantAvailable(hashes, apiKey, retries - 1, Math.ceil(maxChunkSize / 10));
        }
        if (retries > 0 && NON_BLACKLIST_ERRORS.some(v => error?.message?.includes(v))) {
          // Transient network errors retry with the same chunk size.
          return _getInstantAvailable(hashes, apiKey, retries - 1);
        }
        console.warn(`Failed RealDebrid cached [${hashes[0]}] torrent availability request:`, error.message);
        return undefined;
      });
}
/**
 * Normalizes an RD instant-availability payload into a map of
 * infoHash -> groups of cached file ids.
 */
function processAvailabilityResults(availabilityResults) {
  return Object.entries(availabilityResults)
    .reduce((processed, [infoHash, hosterResults]) => {
      processed[infoHash] = getCachedIds(hosterResults);
      return processed;
    }, {});
}
/**
 * Extracts groups of cached file ids from one hash's hoster results, keeping
 * only all-video variants and dropping groups that are subsets of the largest.
 */
function getCachedIds(hosterResults) {
  if (!hosterResults || Array.isArray(hosterResults)) {
    return [];
  }
  // if not all cached files are videos, then the torrent will be zipped to a rar
  const variants = Object.values(hosterResults).flat();
  const videoOnly = variants.filter(variant =>
      Object.keys(variant).length && Object.values(variant).every(file => isVideo(file.filename)));
  const idGroups = videoOnly
    .map(variant => Object.keys(variant))
    .sort((a, b) => b.length - a.length);
  return idGroups.filter((ids, index, array) => index === 0 || ids.some(id => !array[0].includes(id)));
}
/**
 * Picks the group of cached RD file ids that contains the requested file.
 * @param {number|undefined} fileIndex - zero-based index (RD file ids are 1-based)
 * @param {Array<Array<string>>|undefined} cachedResults - id groups from availability
 * @returns {Array<string>} matching group, or [] when nothing is cached
 */
function _getCachedFileIds(fileIndex, cachedResults) {
  if (!Array.isArray(cachedResults)) {
    return [];
  }
  if (Number.isInteger(fileIndex)) {
    const targetId = `${fileIndex + 1}`;
    return cachedResults.find(ids => ids.includes(targetId)) || [];
  }
  return cachedResults[0] || [];
}
/**
 * Builds the RD catalog: a synthetic "Downloads" entry plus one entry per
 * ready torrent. Only the first page is served (later offsets return []).
 * @param {string} apiKey - RealDebrid api token
 * @param {number} offset - catalog paging offset
 * @param {string} ip - client ip forwarded to RD
 */
export async function getCatalog(apiKey, offset, ip) {
  if (offset > 0) {
    return [];
  }
  const options = await getDefaultOptions(ip);
  const RD = new RealDebridClient(apiKey, options);
  // Synthetic entry that exposes the user's RD downloads list.
  const downloadsMeta = {
    id: `${KEY}:${DEBRID_DOWNLOADS}`,
    type: Type.OTHER,
    name: DEBRID_DOWNLOADS
  };
  const torrentMetas = await _getAllTorrents(RD)
    .then(torrents => Array.isArray(torrents) ? torrents : [])
    .then(torrents => torrents
      .filter(torrent => torrent && statusReady(torrent.status))
      .map(torrent => ({
        id: `${KEY}:${torrent.id}`,
        type: Type.OTHER,
        name: torrent.filename
      })));
  return [downloadsMeta].concat(torrentMetas)
}
/**
 * Returns Stremio meta for a catalog item: either the synthetic "Downloads"
 * list (direct RD download links), or a single torrent with one video entry
 * per selected video file.
 */
export async function getItemMeta(itemId, apiKey, ip) {
  const options = await getDefaultOptions(ip);
  const RD = new RealDebridClient(apiKey, options);
  if (itemId === DEBRID_DOWNLOADS) {
    const videos = await _getAllDownloads(RD)
      .then(downloads => downloads
        .map(download => ({
          id: `${KEY}:${DEBRID_DOWNLOADS}:${download.id}`,
          // infoHash: allTorrents
          //     .filter(torrent => (torrent.links || []).find(link => link === download.link))
          //     .map(torrent => torrent.hash.toLowerCase())[0],
          title: download.filename,
          released: new Date(download.generated).toISOString(),
          streams: [{ url: download.download }]
        })));
    return {
      id: `${KEY}:${DEBRID_DOWNLOADS}`,
      type: Type.OTHER,
      name: DEBRID_DOWNLOADS,
      videos: videos
    };
  }
  return _getTorrentInfo(RD, itemId)
    .then(torrent => ({
      id: `${KEY}:${torrent.id}`,
      type: Type.OTHER,
      name: torrent.filename,
      infoHash: torrent.hash.toLowerCase(),
      videos: torrent.files
        .filter(file => file.selected)
        .filter(file => isVideo(file.path))
        .map((file, index) => ({
          id: `${KEY}:${torrent.id}:${file.id}`,
          title: file.path,
          // Offset release times by the index so clients keep the file order.
          released: new Date(new Date(torrent.added).getTime() - index).toISOString(),
          // RD file ids are 1-based; the resolve url expects a 0-based index
          // (see the `fileIndex + 1` usage elsewhere in this module).
          streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/null/${file.id - 1}` }]
        }))
    }))
}
/**
 * Fetches the user's RD torrent list page by page, concatenating pages until
 * a short page arrives or CATALOG_MAX_PAGE is reached. A failing next-page
 * fetch degrades to the pages collected so far.
 * NOTE(review): the call is `get(page - 1, page, CATALOG_PAGE_SIZE)` —
 * presumably (offset, page, limit); confirm against the RD client signature.
 * With CATALOG_MAX_PAGE = 1 the recursion never actually triggers.
 */
async function _getAllTorrents(RD, page = 1) {
  return RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE)
    .then(torrents => torrents && torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE
      ? _getAllTorrents(RD, page + 1)
        .then(nextTorrents => torrents.concat(nextTorrents))
        .catch(() => torrents)
      : torrents)
}
/**
 * Fetches a single page of the user's RealDebrid downloads list.
 */
async function _getAllDownloads(RD, page = 1) {
  const offset = page - 1;
  return RD.downloads.get(offset, page, CATALOG_PAGE_SIZE);
}
/**
 * Resolves a playable url for the given info hash via RealDebrid, mapping
 * known failure modes (access denied, infringement takedown) to static
 * status videos.
 */
export async function resolve({ ip, isBrowser, apiKey, infoHash, fileIndex }) {
  console.log(`Unrestricting RealDebrid ${infoHash} [${fileIndex}]`);
  const options = await getDefaultOptions(ip);
  const RD = new RealDebridClient(apiKey, options);
  const cachedFileIds = await _resolveCachedFileIds(infoHash, fileIndex, apiKey);
  return _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser)
    .catch(error => {
      if (accessDeniedError(error)) {
        console.log(`Access denied to RealDebrid ${infoHash} [${fileIndex}]`);
        return StaticResponse.FAILED_ACCESS;
      }
      if (infringingFile(error)) {
        console.log(`Infringing file removed from RealDebrid ${infoHash} [${fileIndex}]`);
        return StaticResponse.FAILED_INFRINGEMENT;
      }
      return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(error)}`);
    });
}
/**
 * Looks up the cached RD file ids covering the requested file and joins them
 * into the comma-separated form expected by RD's selectFiles.
 */
async function _resolveCachedFileIds(infoHash, fileIndex, apiKey) {
  const available = await _getInstantAvailable([infoHash], apiKey);
  const matchingIds = _getCachedFileIds(fileIndex, available?.[infoHash]);
  return matchingIds?.join(',');
}
/**
 * Drives a torrent through RD's state machine until a link can be
 * unrestricted, mapping intermediate states to static status videos.
 */
async function _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser) {
  const torrentId = await _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex);
  const torrent = await _getTorrentInfo(RD, torrentId);
  if (torrent && statusReady(torrent.status)) {
    return _unrestrictLink(RD, torrent, fileIndex, isBrowser);
  } else if (torrent && statusDownloading(torrent.status)) {
    console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
    return StaticResponse.DOWNLOADING;
  } else if (torrent && statusMagnetError(torrent.status)) {
    console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}] due to magnet error`);
    return StaticResponse.FAILED_OPENING;
  } else if (torrent && statusError(torrent.status)) {
    // Non-magnet errors get one re-create attempt.
    return _retryCreateTorrent(RD, infoHash, fileIndex);
  } else if (torrent && (statusWaitingSelection(torrent.status) || statusOpening(torrent.status))) {
    // Torrent is waiting for (or still converting before) file selection.
    console.log(`Trying to select files on RealDebrid ${infoHash} [${fileIndex}]...`);
    return _selectTorrentFiles(RD, torrent)
      .then(() => {
        console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
        return StaticResponse.DOWNLOADING
      })
      .catch(error => {
        console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}]:`, error);
        return StaticResponse.FAILED_OPENING;
      });
  }
  return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(torrent)}`);
}
/**
 * Reuses a recent matching RD torrent when possible, otherwise adds the
 * magnet and returns the newly created torrent id.
 */
async function _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex) {
  try {
    return await _findTorrent(RD, infoHash, fileIndex);
  } catch {
    return _createTorrentId(RD, infoHash, cachedFileIds);
  }
}
/**
 * Finds a recent non-errored RD torrent for the info hash, preferring the one
 * whose selected files cover the requested index.
 */
async function _findTorrent(RD, infoHash, fileIndex) {
  const torrents = (await RD.torrents.get(0, 1)) || [];
  const candidates = torrents.filter(torrent =>
      torrent.hash.toLowerCase() === infoHash && !statusError(torrent.status));
  const bestFit = await _findBestFitTorrent(RD, candidates, fileIndex);
  return bestFit?.id || Promise.reject('No recent torrent found');
}
/**
 * Of several candidate torrents, picks the one that has the requested file
 * selected (most links first); falls back to the first candidate.
 */
async function _findBestFitTorrent(RD, torrents, fileIndex) {
  if (torrents.length === 1) {
    return torrents[0];
  }
  const infos = await Promise.all(torrents.map(torrent => _getTorrentInfo(RD, torrent.id)));
  const withTargetSelected = infos.filter(info =>
      info.files.find(file => file.id === fileIndex + 1 && file.selected));
  withTargetSelected.sort((a, b) => b.links.length - a.links.length);
  return withTargetSelected[0] || torrents[0];
}
/**
 * Fetches RD torrent info by id; a torrent object passed in is returned
 * untouched, and a missing id rejects.
 */
async function _getTorrentInfo(RD, torrentId) {
  const alreadyResolved = torrentId && typeof torrentId === 'object';
  if (alreadyResolved) {
    return torrentId;
  }
  if (!torrentId) {
    return Promise.reject('No RealDebrid torrentId provided');
  }
  return RD.torrents.info(torrentId);
}
/**
 * Adds the magnet for the info hash to RD and, when concrete cached file ids
 * are known, immediately selects them.
 * @returns {Promise<string>} the new RD torrent id
 */
async function _createTorrentId(RD, infoHash, cachedFileIds) {
  const magnetLink = await getMagnetLink(infoHash);
  const { id } = await RD.torrents.addMagnet(magnetLink);
  const hasConcreteIds = cachedFileIds && !['null', 'undefined'].includes(cachedFileIds);
  if (hasConcreteIds) {
    await RD.torrents.selectFiles(id, cachedFileIds);
  }
  return id;
}
/**
 * Re-adds the torrent from scratch and selects files for the requested index.
 * @returns {Promise<string>} the replacement RD torrent id
 */
async function _recreateTorrentId(RD, infoHash, fileIndex) {
  const replacementId = await _createTorrentId(RD, infoHash);
  await _selectTorrentFiles(RD, { id: replacementId }, fileIndex);
  return replacementId;
}
/**
 * Retries a failed download by re-creating the torrent; resolves to a stream
 * url when it becomes ready, else the failure status video.
 */
async function _retryCreateTorrent(RD, infoHash, fileIndex) {
  console.log(`Retry failed download in RealDebrid ${infoHash} [${fileIndex}]...`);
  const replacementId = await _recreateTorrentId(RD, infoHash, fileIndex);
  const replacement = await _getTorrentInfo(RD, replacementId);
  if (replacement && statusReady(replacement.status)) {
    return _unrestrictLink(RD, replacement, fileIndex);
  }
  return StaticResponse.FAILED_DOWNLOAD;
}
/**
 * Selects which files RD should download: the single requested file when
 * `fileIndex` is an integer (RD ids are 1-based), otherwise every video file
 * larger than MIN_SIZE. Polls the torrent open state first when it is still
 * converting the magnet.
 */
async function _selectTorrentFiles(RD, torrent, fileIndex) {
  torrent = statusWaitingSelection(torrent.status) ? torrent : await _openTorrent(RD, torrent.id);
  if (torrent?.files && statusWaitingSelection(torrent.status)) {
    const videoFileIds = Number.isInteger(fileIndex) ? `${fileIndex + 1}` : torrent.files
      .filter(file => isVideo(file.path))
      .filter(file => file.bytes > MIN_SIZE)
      .map(file => file.id)
      .join(',');
    return RD.torrents.selectFiles(torrent.id, videoFileIds);
  }
  return Promise.reject('Failed RealDebrid torrent file selection')
}
/**
 * Polls an RD torrent while it is still converting the magnet, up to the poll
 * budget, then resolves with the latest torrent snapshot.
 */
async function _openTorrent(RD, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
  const torrent = await _getTorrentInfo(RD, torrentId);
  if (torrent && statusOpening(torrent.status) && pollCounter < maxPollNumber) {
    await delay(pollRate);
    return _openTorrent(RD, torrentId, pollCounter + 1);
  }
  return torrent;
}
/**
 * Picks the torrent file to stream (requested 1-based id, else the largest
 * selected file) and unrestricts its RD link. Re-creates the torrent when the
 * target file was never selected or has no link yet.
 */
async function _unrestrictLink(RD, torrent, fileIndex, isBrowser) {
  const targetFile = torrent.files.find(file => file.id === fileIndex + 1)
      || torrent.files.filter(file => file.selected).sort((a, b) => b.bytes - a.bytes)[0];
  if (!targetFile.selected) {
    console.log(`Target RealDebrid file is not downloaded: ${JSON.stringify(targetFile)}`);
    await _recreateTorrentId(RD, torrent.hash.toLowerCase(), fileIndex);
    return StaticResponse.DOWNLOADING;
  }
  // `torrent.links` lines up positionally with the selected files.
  const selectedFiles = torrent.files.filter(file => file.selected);
  const fileLink = torrent.links.length === 1
    ? torrent.links[0]
    : torrent.links[selectedFiles.indexOf(targetFile)];
  if (!fileLink?.length) {
    console.log(`No RealDebrid links found for ${torrent.hash} [${fileIndex}]`);
    return _retryCreateTorrent(RD, torrent.hash, fileIndex)
  }
  return _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser);
}
/**
 * Unrestricts an RD file link to a direct download url.
 * An archive response means RD zipped multiple files: retry with explicit
 * selection when more than one file is selected, else surface the RAR video.
 */
async function _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser) {
  return RD.unrestrict.link(fileLink)
    .then(response => {
      if (isArchive(response.download)) {
        if (torrent.files.filter(file => file.selected).length > 1) {
          return _retryCreateTorrent(RD, torrent.hash, fileIndex)
        }
        return StaticResponse.FAILED_RAR;
      }
      // if (isBrowser && response.streamable) {
      //   return RD.streaming.transcode(response.id)
      //       .then(streamResponse => streamResponse.apple.full)
      // }
      return response.download;
    })
    .then(unrestrictedLink => {
      console.log(`Unrestricted RealDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
      return unrestrictedLink;
    })
    .catch(error => {
      // NOTE(review): code 19 triggers a full torrent re-create here —
      // presumably a stale/expired link; confirm against RD's error codes.
      if (error.code === 19) {
        return _retryCreateTorrent(RD, torrent.hash.toLowerCase(), fileIndex);
      }
      return Promise.reject(error);
    });
}
/**
 * Maps RealDebrid errors to the shared moch error types.
 * @returns {Error|undefined} BadTokenError for code 8, AccessDeniedError for
 *   access-denied codes, undefined otherwise
 */
export function toCommonError(error) {
  if (error?.code === 8) {
    return BadTokenError;
  }
  if (error && accessDeniedError(error)) {
    return AccessDeniedError;
  }
  return undefined;
}
// RealDebrid torrent status predicates.

function statusError(status) {
  return status === 'error' || status === 'magnet_error';
}

function statusMagnetError(status) {
  return status === 'magnet_error';
}

function statusOpening(status) {
  return status === 'magnet_conversion';
}

function statusWaitingSelection(status) {
  return status === 'waiting_files_selection';
}

function statusDownloading(status) {
  return status === 'downloading' || status === 'uploading' || status === 'queued';
}

function statusReady(status) {
  return status === 'downloaded' || status === 'dead';
}

// RD error-code predicates.

function accessDeniedError(error) {
  return error?.code === 9 || error?.code === 20;
}

function infringingFile(error) {
  return error && error.code === 35;
}

/**
 * Default RD client options: forward the client ip, 10s request timeout.
 */
async function getDefaultOptions(ip) {
  const timeout = 10000;
  return { ip, timeout };
}

View File

@@ -0,0 +1,16 @@
// Relative paths of the pre-rendered status videos served instead of a stream
// when resolving fails or the download is still in progress.
const staticVideoUrls = {
  DOWNLOADING: `videos/downloading_v2.mp4`,
  FAILED_DOWNLOAD: `videos/download_failed_v2.mp4`,
  FAILED_ACCESS: `videos/failed_access_v2.mp4`,
  FAILED_RAR: `videos/failed_rar_v2.mp4`,
  FAILED_OPENING: `videos/failed_opening_v2.mp4`,
  FAILED_UNEXPECTED: `videos/failed_unexpected_v2.mp4`,
  FAILED_INFRINGEMENT: `videos/failed_infringement_v2.mp4`
}

/**
 * Checks whether a resolved url points at one of the static status videos.
 */
export function isStaticUrl(url) {
  if (!url) {
    return false;
  }
  return Object.values(staticVideoUrls).some(videoUrl => url.endsWith(videoUrl));
}

export default staticVideoUrls

2568
src/node/addon/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,39 @@
{
"name": "selfhostio-selfhostio",
"version": "1.0.0",
"exports": "./index.js",
"type": "module",
"scripts": {
"start": "node index.js"
},
"license": "MIT",
"dependencies": {
"@putdotio/api-client": "^8.42.0",
"all-debrid-api": "^1.1.0",
"axios": "^1.6.1",
"bottleneck": "^2.19.5",
"cache-manager": "^3.4.4",
"cache-manager-mongodb": "^0.3.0",
"cors": "^2.8.5",
"debrid-link-api": "^1.0.1",
"express-rate-limit": "^6.7.0",
"ip": "^1.1.8",
"magnet-uri": "^6.2.0",
"name-to-imdb": "^3.0.4",
"named-queue": "^2.2.1",
"offcloud-api": "^1.0.2",
"parse-torrent-title": "git://github.com/TheBeastLT/parse-torrent-title.git#022408972c2a040f846331a912a6a8487746a654",
"pg": "^8.11.3",
"pg-hstore": "^2.3.4",
"premiumize-api": "^1.0.3",
"prom-client": "^12.0.0",
"real-debrid-api": "git://github.com/TheBeastLT/node-real-debrid.git#d1f7eaa8593b947edbfbc8a92a176448b48ef445",
"request-ip": "^3.3.0",
"router": "^1.3.8",
"sequelize": "^6.31.1",
"stremio-addon-sdk": "^1.6.10",
"swagger-stats": "^0.99.7",
"ua-parser-js": "^1.0.36",
"user-agents": "^1.0.1444"
}
}

View File

@@ -0,0 +1,107 @@
import Router from 'router';
import cors from 'cors';
import rateLimit from "express-rate-limit";
import requestIp from 'request-ip';
import userAgentParser from 'ua-parser-js';
import addonInterface from './addon.js';
import qs from 'querystring';
import { manifest } from './lib/manifest.js';
import { parseConfiguration } from './lib/configuration.js';
import landingTemplate from './lib/landingTemplate.js';
import * as moch from './moch/moch.js';
const router = new Router();
// Per-client-ip rate limiter applied to the resource route below.
// NOTE(review): the original comment said "300 requests" but max is
// 3,000,000/hour — effectively unlimited; confirm the intended limit.
const limiter = rateLimit({
  windowMs: 60 * 60 * 1000, // 1 hour
  max: 3000000, // limit each IP to 3000000 requests per windowMs
  headers: false,
  keyGenerator: (req) => requestIp.getClientIp(req)
})
router.use(cors())
// Root: send users to the configuration landing page.
router.get('/', (_, res) => {
  res.redirect('/configure');
  res.end();
});

// Landing page, optionally pre-filled from an encoded configuration segment.
router.get('/:configuration?/configure', (req, res) => {
  const configValues = parseConfiguration(req.params.configuration || '');
  const body = landingTemplate(manifest(configValues), configValues);
  res.setHeader('content-type', 'text/html');
  res.end(body);
});

// Stremio manifest, rendered per configuration.
router.get('/:configuration?/manifest.json', (req, res) => {
  const configValues = parseConfiguration(req.params.configuration || '');
  res.setHeader('Content-Type', 'application/json; charset=utf-8');
  res.end(JSON.stringify(manifest(configValues)));
});
// Stremio resource route (stream/meta/catalog), rate limited per client ip.
router.get('/:configuration?/:resource/:type/:id/:extra?.json', limiter, (req, res, next) => {
  const { configuration, resource, type, id } = req.params;
  // `extra` is the last path segment, querystring-encoded, ending in '.json'.
  const extra = req.params.extra ? qs.parse(req.url.split('/').pop().slice(0, -5)) : {}
  const ip = requestIp.getClientIp(req);
  const host = `${req.protocol}://${req.headers.host}`;
  const configValues = { ...extra, ...parseConfiguration(configuration), id, type, ip, host };
  addonInterface.get(resource, type, id, configValues)
    .then(resp => {
      // Translate the addon response's integer cache hints into Cache-Control.
      const cacheHeaders = {
        cacheMaxAge: 'max-age',
        staleRevalidate: 'stale-while-revalidate',
        staleError: 'stale-if-error'
      };
      const cacheControl = Object.keys(cacheHeaders)
        .map(prop => Number.isInteger(resp[prop]) && cacheHeaders[prop] + '=' + resp[prop])
        .filter(val => !!val).join(', ');
      res.setHeader('Cache-Control', `${cacheControl}, public`);
      res.setHeader('Content-Type', 'application/json; charset=utf-8');
      res.end(JSON.stringify(resp));
    })
    .catch(err => {
      if (err.noHandler) {
        // No handler for this resource: fall through to the next route or 404.
        if (next) {
          next()
        } else {
          res.writeHead(404);
          res.end(JSON.stringify({ err: 'not found' }));
        }
      } else {
        console.error(err);
        res.writeHead(500);
        res.end(JSON.stringify({ err: 'handler error' }));
      }
    });
});
// Debrid resolve route: 302-redirects the player to an unrestricted stream url.
router.get('/:moch/:apiKey/:infoHash/:cachedEntryInfo/:fileIndex/:filename?', (req, res) => {
  const userAgent = req.headers['user-agent'] || '';
  const parameters = {
    mochKey: req.params.moch,
    apiKey: req.params.apiKey,
    infoHash: req.params.infoHash.toLowerCase(),
    // 'null'/'undefined' path segments parse as NaN and become undefined.
    fileIndex: isNaN(req.params.fileIndex) ? undefined : parseInt(req.params.fileIndex),
    cachedEntryInfo: req.params.cachedEntryInfo,
    ip: requestIp.getClientIp(req),
    host: `${req.protocol}://${req.headers.host}`,
    // Stremio's own client is never treated as a browser.
    isBrowser: !userAgent.includes('Stremio') && !!userAgentParser(userAgent).browser.name
  }
  moch.resolve(parameters)
    .then(url => {
      res.writeHead(302, { Location: url });
      res.end();
    })
    .catch(error => {
      console.log(error);
      res.statusCode = 404;
      res.end();
    });
});
/**
 * Request entry point: delegates to the router and 404s anything unmatched.
 */
export default function (req, res) {
  const notFound = () => {
    res.statusCode = 404;
    res.end();
  };
  router(req, res, notFound);
};

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,2 @@
build.sh
node_modules/

View File

@@ -0,0 +1,16 @@
env:
es2021: true
node: true
extends: eslint:recommended
plugins:
- import
rules:
import/no-unresolved: 2
import/no-commonjs: 2
import/extensions:
- 2
- ignorePackages
parserOptions:
ecmaVersion: latest
sourceType: module

View File

@@ -0,0 +1,18 @@
FROM node:lts-buster-slim
# RUN apk update && apk upgrade && \
# apk add --no-cache git curl
RUN apt-get update && \
apt-get install -y curl git && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
WORKDIR /home/node/app
COPY package*.json ./
RUN npm ci --only-production
COPY . .
RUN chmod a+x ./check-ip.sh
CMD [ "node", "--no-warnings=ExperimentalWarning", "index.js" ]

View File

@@ -0,0 +1,5 @@
#!/bin/sh
# Queries Akamai's whatismyip service for this host's public IP and prints it
# (useful for verifying the container's egress address, e.g. behind a VPN).
CURRENT_IP="$(curl -s http://whatismyip.akamai.com)"
clear
echo "Current IP: $CURRENT_IP"

View File

@@ -0,0 +1,11 @@
import { getTrackers } from "./lib/trackerService.js";
import { connect } from './lib/repository.js';
import { listenToQueue } from './jobs/processTorrents.js';
import { jobConfig } from "./lib/config.js";

// Consumer entry point: warm the tracker cache, connect to the database and,
// unless jobs are disabled via config, start consuming the ingestion queue.
await getTrackers();
await connect();
if (jobConfig.JOBS_ENABLED) {
  await listenToQueue();
}

View File

@@ -0,0 +1,34 @@
import { rabbitConfig, jobConfig } from '../lib/config.js'
import { processTorrentRecord } from "../lib/ingestedTorrent.js";
import amqp from 'amqplib'
import Promise from 'bluebird'
// Queue setup: durable queue, manual acks so failed messages are not lost.
const assertQueueOptions = { durable: true }
const consumeQueueOptions = { noAck: false }

/**
 * Parses and processes a single queue message, resolving with the original
 * message so the caller can ack it.
 */
const processMessage = async msg => {
  const torrent = await getMessageAsJson(msg);
  await processTorrentRecord(torrent);
  return msg;
};

/**
 * Extracts the ingested torrent payload from a raw AMQP message body.
 */
const getMessageAsJson = async msg => JSON.parse(msg.content.toString()).message;
/**
 * Asserts the ingestion queue and starts consuming it with the configured
 * prefetch; each message is acked only after successful processing.
 * NOTE(review): failures are only logged and the message is never nacked, so
 * a poison message stays unacked until the channel closes — confirm intent.
 */
const assertAndConsumeQueue = channel => {
  console.log('Worker is running! Waiting for new torrents...')
  const ackMsg = msg => Promise.resolve(msg)
    .then(msg => processMessage(msg))
    .then(msg => channel.ack(msg))
    .catch(error => console.error('Failed processing torrent', error));
  return channel.assertQueue(rabbitConfig.QUEUE_NAME, assertQueueOptions)
    .then(() => channel.prefetch(jobConfig.JOB_CONCURRENCY))
    .then(() => channel.consume(rabbitConfig.QUEUE_NAME, ackMsg, consumeQueueOptions))
}
/**
 * Connects to RabbitMQ, opens a channel and starts consuming the queue.
 */
export const listenToQueue = async () => {
  const connection = await amqp.connect(rabbitConfig.URI);
  const channel = await connection.createChannel();
  return assertAndConsumeQueue(channel);
};

View File

@@ -0,0 +1,72 @@
import { cacheConfig } from './config.js';
import cacheManager from 'cache-manager';
import mangodbStore from 'cache-manager-mongodb';
// Key namespaces so different cached entities never collide in the store.
const GLOBAL_KEY_PREFIX = 'selfhostio-consumer';
const IMDB_ID_PREFIX = `${GLOBAL_KEY_PREFIX}|imdb_id`;
const KITSU_ID_PREFIX = `${GLOBAL_KEY_PREFIX}|kitsu_id`;
const METADATA_PREFIX = `${GLOBAL_KEY_PREFIX}|metadata`;
const TRACKERS_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|trackers`;
// TTLs in seconds; METADATA_TTL (env) overrides both remote and memory TTLs.
const GLOBAL_TTL = process.env.METADATA_TTL || 7 * 24 * 60 * 60; // 7 days
const MEMORY_TTL = process.env.METADATA_TTL || 2 * 60 * 60; // 2 hours
const TRACKERS_TTL = 2 * 24 * 60 * 60; // 2 days

const memoryCache = initiateMemoryCache();
const remoteCache = initiateRemoteCache();
/**
 * Builds the shared cache: MongoDB-backed when MONGO_URI is configured,
 * plain in-memory otherwise, or null when caching is disabled entirely.
 */
function initiateRemoteCache() {
  if (cacheConfig.NO_CACHE) {
    return null;
  } else if (cacheConfig.MONGO_URI) {
    return cacheManager.caching({
      store: mangodbStore,
      uri: cacheConfig.MONGO_URI,
      options: {
        collection: cacheConfig.COLLECTION_NAME,
        socketTimeoutMS: 120000,
        useNewUrlParser: true,
        useUnifiedTopology: false,
        ttl: GLOBAL_TTL
      },
      ttl: GLOBAL_TTL,
      // Cache failures degrade to recomputation rather than erroring out.
      ignoreCacheErrors: true
    });
  } else {
    return cacheManager.caching({
      store: 'memory',
      ttl: MEMORY_TTL
    });
  }
}
/**
 * In-process cache used for hot metadata and the tracker list.
 */
function initiateMemoryCache() {
  const memoryStoreOptions = {
    store: 'memory',
    ttl: MEMORY_TTL,
    max: Infinity // infinite LRU cache size
  };
  return cacheManager.caching(memoryStoreOptions);
}
/**
 * Runs `method` through the given cache unless caching is disabled or the
 * cache is unavailable, in which case it is invoked directly.
 */
function cacheWrap(cache, key, method, options) {
  const cachingDisabled = cacheConfig.NO_CACHE || !cache;
  return cachingDisabled ? method() : cache.wrap(key, method, options);
}
/** Caches imdb id lookups in the remote cache (7 day TTL). */
export function cacheWrapImdbId(key, fn) {
  return cacheWrap(remoteCache, `${IMDB_ID_PREFIX}:${key}`, fn, { ttl: GLOBAL_TTL });
}

/** Caches kitsu id lookups in the remote cache (7 day TTL). */
export function cacheWrapKitsuId(key, fn) {
  return cacheWrap(remoteCache, `${KITSU_ID_PREFIX}:${key}`, fn, { ttl: GLOBAL_TTL });
}

/** Caches resolved metadata in memory (2 hour TTL). */
export function cacheWrapMetadata(id, fn) {
  return cacheWrap(memoryCache, `${METADATA_PREFIX}:${id}`, fn, { ttl: MEMORY_TTL });
}

/** Caches the tracker list in memory (2 day TTL). */
export function cacheTrackers(fn) {
  return cacheWrap(memoryCache, `${TRACKERS_KEY_PREFIX}`, fn, { ttl: TRACKERS_TTL });
}

View File

@@ -0,0 +1,45 @@
// RabbitMQ connection for the ingestion queue.
export const rabbitConfig = {
  URI: process.env.RABBIT_URI || 'amqp://localhost',
  QUEUE_NAME: process.env.QUEUE_NAME || 'test-queue'
}

// MongoDB-backed metadata cache settings.
export const cacheConfig = {
  MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/selfhostio?authSource=admin',
  NO_CACHE: parseBool(process.env.NO_CACHE, false),
  COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'selfhostio_consumer_collection'
}

// Postgres torrent repository settings.
export const databaseConfig = {
  DATABASE_URI: process.env.POSTGRES_DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/selfhostio',
  ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true)
}
// Torrent-processing job settings.
export const jobConfig = {
  // Number of queue messages processed concurrently (channel prefetch count).
  JOB_CONCURRENCY: parseInt(process.env.JOB_CONCURRENCY || 1, 10),
  // Bug fix: parseBool takes (value, defaultValue). The original passed
  // `env || true`, which hands parseBool a boolean when the env is unset and
  // omits the default entirely — so JOBS_ENABLED evaluated to undefined and
  // jobs were silently disabled by default.
  JOBS_ENABLED: parseBool(process.env.JOBS_ENABLED, true)
}
// IMDb lookup throttling for the metadata service.
export const metadataConfig = {
  // Always pass a radix to parseInt so env values are parsed as base 10.
  IMDB_CONCURRENT: parseInt(process.env.IMDB_CONCURRENT || 1, 10),
  IMDB_INTERVAL_MS: parseInt(process.env.IMDB_INTERVAL_MS || 1000, 10),
}
// Tracker list source used to enrich created torrent entries.
export const trackerConfig = {
  TRACKERS_URL: process.env.TRACKERS_URL || 'https://ngosang.github.io/trackerslist/trackers_all.txt',
  // Bug fix: pass the default as parseBool's second argument. The original
  // `parseBool(env || false)` handed parseBool a boolean with no default,
  // yielding undefined instead of false when the env var is unset.
  UDP_ENABLED: parseBool(process.env.UDP_TRACKERS_ENABLED, false),
}
// Per-torrent download limits for the scraping client.
export const torrentConfig = {
  // Always pass a radix to parseInt so env values are parsed as base 10.
  MAX_CONNECTIONS_PER_TORRENT: parseInt(process.env.MAX_SINGLE_TORRENT_CONNECTIONS || 20, 10),
  TIMEOUT: parseInt(process.env.TORRENT_TIMEOUT || 30000, 10),
}
/**
 * Parses a boolean environment variable.
 * @param {string|undefined} boolString - raw env value
 * @param {boolean} defaultValue - returned when the value is absent, not a
 *   string, or not a recognized boolean literal
 * @returns {boolean}
 */
function parseBool(boolString, defaultValue) {
  const isString = typeof boolString === 'string' || boolString instanceof String;
  if (!isString) {
    return defaultValue;
  }
  const normalized = boolString.toLowerCase();
  // Bug fix: the original returned `defaultValue` for every non-'true'
  // string, so an explicit "false" could never override a `true` default.
  if (normalized === 'true') {
    return true;
  }
  if (normalized === 'false') {
    return false;
  }
  return defaultValue;
}

View File

@@ -0,0 +1,62 @@
// Known video container extensions.
const VIDEO_EXTENSIONS = [
  "3g2",
  "3gp",
  "avi",
  "flv",
  "mkv",
  "mk3d",
  "mov",
  "mp2",
  "mp4",
  "m4v",
  "mpe",
  "mpeg",
  "mpg",
  "mpv",
  "webm",
  "wmv",
  "ogm",
  "divx"
];
// Known subtitle file extensions.
const SUBTITLE_EXTENSIONS = [
  "aqt",
  "gsub",
  "jss",
  "sub",
  "ttxt",
  "pjs",
  "psb",
  "rt",
  "smi",
  "slt",
  "ssf",
  "srt",
  "ssa",
  "ass",
  "usf",
  "idx",
  "vtt"
];
// Disk-image / raw stream extensions.
const DISK_EXTENSIONS = [
  "iso",
  "m2ts",
  "ts",
  "vob"
]

/** True when the filename has a known video extension. */
export function isVideo(filename) {
  return isExtension(filename, VIDEO_EXTENSIONS);
}

/** True when the filename has a known subtitle extension. */
export function isSubtitle(filename) {
  return isExtension(filename, SUBTITLE_EXTENSIONS);
}

/** True when the filename has a known disk-image extension. */
export function isDisk(filename) {
  return isExtension(filename, DISK_EXTENSIONS);
}

/**
 * Checks the filename's 2-4 character extension against a list
 * (case-insensitive).
 * Improvement: always returns a boolean — the original returned `null` for
 * names without an extension, which leaked into callers relying on truthiness.
 */
export function isExtension(filename, extensions) {
  const extensionMatch = filename.match(/\.(\w{2,4})$/);
  return !!extensionMatch && extensions.includes(extensionMatch[1].toLowerCase());
}

View File

@@ -0,0 +1,45 @@
import { Type } from './types.js';
import { createTorrentEntry, checkAndUpdateTorrent } from './torrentEntries.js';
import {getTrackers} from "./trackerService.js";
/**
 * Converts an ingested torrent record into a repository entry, returning the
 * parsed info directly when an existing entry was updated instead.
 */
export async function processTorrentRecord(torrent) {
  const type = torrent.category === 'tv' ? Type.SERIES : Type.MOVIE;
  const torrentInfo = await parseTorrent(torrent, type);
  console.log(`Processing torrent ${torrentInfo.title} with infoHash ${torrentInfo.infoHash}`)
  const alreadyStored = await checkAndUpdateTorrent(torrentInfo);
  return alreadyStored ? torrentInfo : createTorrentEntry(torrentInfo);
}
/**
 * Returns the cached tracker list as a comma-separated string.
 */
async function assignTorrentTrackers() {
  const trackerList = await getTrackers();
  return trackerList.join(',');
}
/**
 * Maps an ingested queue record to the internal torrent entry shape.
 */
async function parseTorrent(torrent, category) {
  const infoHash = torrent.infoHash?.trim().toLowerCase()
  return {
    title: torrent.name,
    torrentId: `${torrent.name}_${infoHash}`,
    infoHash: infoHash,
    // NOTE(review): seeders are hard-coded to 100 — confirm this is intended
    // for RSS-ingested entries with no live seeder counts.
    seeders: 100,
    size: torrent.size,
    uploadDate: torrent.createdAt,
    imdbId: parseImdbId(torrent),
    type: category,
    provider: torrent.source,
    trackers: await assignTorrentTrackers(),
  }
}

/**
 * Returns the record's imdb id, or undefined when it is absent (null counts
 * as absent).
 */
function parseImdbId(torrent) {
  return torrent.imdb ?? undefined;
}

View File

@@ -0,0 +1,165 @@
import axios from 'axios';
import nameToImdb from 'name-to-imdb';
import { search } from 'google-sr';
import { cacheWrapImdbId, cacheWrapKitsuId, cacheWrapMetadata } from './cache.js';
import { Type } from './types.js';
const CINEMETA_URL = 'https://v3-cinemeta.strem.io';
const KITSU_URL = 'https://anime-kitsu.strem.fun';
const TIMEOUT = 20000;
/**
 * Fetches title metadata for an id, trying anime-kitsu first, then Cinemeta,
 * then Cinemeta with the opposite type (movie/series) in case the caller
 * guessed wrong. Results are memoised in the in-memory metadata cache.
 * @param {string|number} id - imdb id ('tt…') or a bare numeric id (treated as kitsu)
 * @param {string} type - Type.MOVIE, anything else is treated as series
 * @returns {Promise<object>} normalized metadata (see _requestMetadata)
 */
export function getMetadata(id, type = Type.SERIES) {
  if (!id) {
    return Promise.reject("no valid id provided");
  }
  const key = Number.isInteger(id) || id.match(/^\d+$/) ? `kitsu:${id}` : id;
  const metaType = type === Type.MOVIE ? Type.MOVIE : Type.SERIES;
  return cacheWrapMetadata(key, () => _requestMetadata(`${KITSU_URL}/meta/${metaType}/${key}.json`)
    .catch(() => _requestMetadata(`${CINEMETA_URL}/meta/${metaType}/${key}.json`))
    .catch(() => {
      // try different type in case there was a mismatch
      const otherType = metaType === Type.MOVIE ? Type.SERIES : Type.MOVIE;
      return _requestMetadata(`${CINEMETA_URL}/meta/${otherType}/${key}.json`)
    })
    .catch((error) => {
      throw new Error(`failed metadata query ${key} due: ${error.message}`);
    }));
}
/**
 * Requests and normalises addon metadata from a Cinemeta-compatible endpoint.
 * Resolves with a flat summary (ids, title, videos, per-season episode
 * counts); rejects when the response carries no imdb/kitsu id.
 */
function _requestMetadata(url) {
  return axios.get(url, { timeout: TIMEOUT })
    .then((response) => {
      const body = response.data;
      if (body && body.meta && (body.meta.imdb_id || body.meta.kitsu_id)) {
        return {
          kitsuId: body.meta.kitsu_id,
          imdbId: body.meta.imdb_id,
          type: body.meta.type,
          title: body.meta.name,
          year: body.meta.year,
          country: body.meta.country,
          genres: body.meta.genres,
          status: body.meta.status,
          // Entries with an imdb season mapping keep it; plain entries keep
          // their kitsu identifiers and release date instead.
          videos: (body.meta.videos || [])
            .map((video) => Number.isInteger(video.imdbSeason)
              ? {
                name: video.name || video.title,
                season: video.season,
                episode: video.episode,
                imdbSeason: video.imdbSeason,
                imdbEpisode: video.imdbEpisode
              }
              : {
                name: video.name || video.title,
                season: video.season,
                episode: video.episode,
                kitsuId: video.kitsu_id,
                kitsuEpisode: video.kitsuEpisode,
                released: video.released
              }
            ),
          // Episode counts per season, specials (season/episode 0) excluded.
          episodeCount: Object.values((body.meta.videos || [])
            .filter((entry) => entry.season !== 0 && entry.episode !== 0)
            .sort((a, b) => a.season - b.season)
            .reduce((map, next) => {
              map[next.season] = map[next.season] + 1 || 1;
              return map;
            }, {})),
          totalCount: body.meta.videos && body.meta.videos
            .filter((entry) => entry.season !== 0 && entry.episode !== 0).length
        };
      } else {
        throw new Error('No search results');
      }
    });
}
/**
 * Normalize a title for searching/matching: lowercase, strip diacritics,
 * normalize separators and drop special characters (cyrillic is kept).
 * @param {string} title - raw title
 * @returns {string} sanitized, space-normalized title
 */
export function escapeTitle(title) {
  return title.toLowerCase()
      .normalize('NFKD') // decompose accented characters so diacritics can be stripped
      .replace(/[\u0300-\u036F]/g, '') // strip combining diacritical marks
      .replace(/&/g, 'and')
      .replace(/[;, ~./]+/g, ' ') // replace separator punctuation runs with a single space
      .replace(/[^\w \-()×+#@!'\u0400-\u04ff]+/g, '') // remove remaining non-alphanumeric chars
      .replace(/^\d{1,2}[.#\s]+(?=(?:\d+[.\s]*)?[\u0400-\u04ff])/i, '') // remove russian movie numbering
      .replace(/\s{2,}/g, ' ') // collapse ALL multi-space runs (fix: /g was missing, only first run collapsed)
      .trim();
}
/**
 * Resolve an imdb id for the given title info, using name-to-imdb (cached)
 * and falling back to a Google search on failure.
 * Returns a normalized, zero-padded 'tt…' id, or a falsy value when unresolved.
 */
export async function getImdbId(info, type) {
  const sanitizedName = escapeTitle(info.title);
  const year = info.year || (info.date && info.date.slice(0, 4));
  const cacheKey = `${sanitizedName}_${year || 'NA'}_${type}`;
  const googleQuery = year
      ? `${sanitizedName} ${year} ${type} imdb`
      : `${sanitizedName} ${type} imdb`;
  // strip any leading zeros, then re-pad to the canonical 7-digit form
  const normalize = id => id && 'tt' + id.replace(/tt0*([1-9][0-9]*)$/, '$1').padStart(7, '0');
  try {
    const imdbId = await cacheWrapImdbId(cacheKey,
        () => getIMDbIdFromNameToImdb(sanitizedName, info.year, type));
    return normalize(imdbId);
  } catch {
    return normalize(await getIMDbIdFromGoogle(googleQuery));
  }
}
// Promisified wrapper around the callback-based name-to-imdb lookup.
// Rejects with the library error (or a generic one) when no result is found.
function getIMDbIdFromNameToImdb(name, year, type) {
  return new Promise((resolve, reject) => {
    nameToImdb({ name, year, type }, (error, result) => {
      if (result) {
        resolve(result);
      } else {
        reject(error || new Error('Failed IMDbId search'));
      }
    });
  });
}
/**
 * Last-resort imdb id lookup: scan Google search results for an imdb title link.
 * @param {string} query - search query
 * @returns {Promise<string|undefined>} the 'tt…' id, or undefined when no link matches
 * @throws {Error} when the search itself fails (original error kept as cause)
 */
async function getIMDbIdFromGoogle(query) {
  try {
    const searchResults = await search({ query: query });
    for (const result of searchResults) {
      // fix: guard missing links — previously result.link.includes threw and
      // the whole lookup failed on a single malformed result
      const match = result.link && result.link.match(/imdb\.com\/title\/(tt\d+)/);
      if (match) {
        return match[1];
      }
    }
    return undefined;
  } catch (error) {
    // preserve the underlying failure instead of discarding it
    throw new Error('Failed to find IMDb ID from Google search', { cause: error });
  }
}
/**
 * Resolve a kitsu id by searching the kitsu anime list catalog with a
 * sanitized "title [year] [Sx]" key. Result is cached per key.
 */
export async function getKitsuId(info) {
  const cleanTitle = escapeTitle(info.title.replace(/\s\|\s.*/, ''));
  const yearSuffix = info.year ? ` ${info.year}` : '';
  const seasonSuffix = info.season > 1 ? ` S${info.season}` : '';
  const searchKey = `${cleanTitle}${yearSuffix}${seasonSuffix}`;
  return cacheWrapKitsuId(searchKey, async () => {
    const url = `${KITSU_URL}/catalog/series/kitsu-anime-list/search=${encodeURIComponent(searchKey)}.json`;
    const response = await axios.get(url, { timeout: 60000 });
    const body = response.data;
    if (body && body.metas && body.metas.length) {
      return body.metas[0].id.replace('kitsu:', '');
    }
    throw new Error('No search results');
  });
}
/**
 * Heuristically check whether an imdb id refers to an episode by looking for
 * the 'video.episode' marker in the imdb title page. Any failure yields false.
 */
export async function isEpisodeImdbId(imdbId) {
  if (!imdbId) {
    return false;
  }
  try {
    const response = await axios.get(`https://www.imdb.com/title/${imdbId}/`, { timeout: 10000 });
    return Boolean(response.data && response.data.includes('video.episode'));
  } catch {
    return false;
  }
}

View File

@@ -0,0 +1,98 @@
import { parse } from 'parse-torrent-title';
import { Type } from './types.js';
const MULTIPLE_FILES_SIZE = 4 * 1024 * 1024 * 1024; // 4 GB
/**
 * Parse every video in a series torrent and flag files that are actually movies
 * (e.g. bundled films inside a pack), based on the torrent title and the
 * collection of sibling videos.
 */
export function parseSeriesVideos(torrent, videos) {
  const titleInfo = parse(torrent.title);
  const containsMovies = titleInfo.complete || !!torrent.title.match(/movies?(?:\W|$)/i);
  const parsed = videos.map(video => parseSeriesVideo(video, titleInfo));
  return parsed.map(video => ({
    ...video,
    isMovie: isMovieVideo(video, parsed, torrent.type, containsMovies)
  }));
}
// Parse season/episode info for a single video file, falling back to parent
// folder names, the torrent title, and finally raw number matching when the
// filename alone is ambiguous.
function parseSeriesVideo(video, parsedTorrentName) {
  const videoInfo = parse(video.name);
  // the episode may be in a folder containing season number
  if (!Number.isInteger(videoInfo.season) && video.path.includes('/')) {
    const folders = video.path.split('/');
    const pathInfo = parse(folders[folders.length - 2]);
    videoInfo.season = pathInfo.season;
  }
  if (!Number.isInteger(videoInfo.season) && parsedTorrentName.season) {
    videoInfo.season = parsedTorrentName.season;
  }
  if (!Number.isInteger(videoInfo.season) && videoInfo.seasons && videoInfo.seasons.length > 1) {
    // in case single file was interpreted as having multiple seasons
    videoInfo.season = videoInfo.seasons[0];
  }
  if (!Number.isInteger(videoInfo.season) && video.path.includes('/') && parsedTorrentName.seasons
      && parsedTorrentName.seasons.length > 1) {
    // russian season are usually named with 'series name-2` i.e. Улицы разбитых фонарей-6/22. Одиночный выстрел.mkv
    const folderPathSeasonMatch = video.path.match(/[\u0400-\u04ff]-(\d{1,2})(?=.*\/)/);
    videoInfo.season = folderPathSeasonMatch && parseInt(folderPathSeasonMatch[1], 10) || undefined;
  }
  // sometimes video file does not have correct date format as in torrent title
  if (!videoInfo.episodes && !videoInfo.date && parsedTorrentName.date) {
    videoInfo.date = parsedTorrentName.date;
  }
  // limit number of episodes in case of incorrect parsing
  if (videoInfo.episodes && videoInfo.episodes.length > 20) {
    videoInfo.episodes = [videoInfo.episodes[0]];
    videoInfo.episode = videoInfo.episodes[0];
  }
  // force episode to any found number if it was not parsed
  if (!videoInfo.episodes && !videoInfo.date) {
    const epMatcher = videoInfo.title.match(
      /(?<!season\W*|disk\W*|movie\W*|film\W*)(?:^|\W|_)(\d{1,4})(?:a|b|c|v\d)?(?:_|\W|$)(?!disk|movie|film)/i);
    videoInfo.episodes = epMatcher && [parseInt(epMatcher[1], 10)];
    videoInfo.episode = videoInfo.episodes && videoInfo.episodes[0];
  }
  // last resort: a number right after the year or a "part" marker in the filename
  if (!videoInfo.episodes && !videoInfo.date) {
    const epMatcher = video.name.match(new RegExp(`(?:\\(${videoInfo.year}\\)|part)[._ ]?(\\d{1,3})(?:\\b|_)`, "i"));
    videoInfo.episodes = epMatcher && [parseInt(epMatcher[1], 10)];
    videoInfo.episode = videoInfo.episodes && videoInfo.episodes[0];
  }
  return { ...video, ...videoInfo };
}
/**
 * Decide whether a video inside a (series-looking) torrent is actually a movie.
 * @param {object} video - parsed video entry
 * @param {object[]} otherVideos - all parsed videos in the torrent
 * @param {string} type - torrent type
 * @param {boolean} hasMovies - whether the torrent title suggests bundled movies
 * @returns {boolean}
 */
function isMovieVideo(video, otherVideos, type, hasMovies) {
  if (Number.isInteger(video.season) && Array.isArray(video.episodes)) {
    // not movie if video has season
    return false;
  }
  if (video.name.match(/\b(?:\d+[ .]movie|movie[ .]\d+)\b/i)) {
    // movie if video explicitly has numbered movie keyword in the name, ie. 1 Movie or Movie 1
    return true;
  }
  if (!hasMovies && type !== Type.ANIME) {
    // not movie if torrent name does not contain movies keyword or is not a pack torrent and is not anime
    return false;
  }
  if (!video.episodes) {
    // movie if there's no episode info it could be a movie
    return true;
  }
  // movie if contains year info and there aren't more than 3 videos with same title and year
  // as some series titles might contain year in it.
  // fix: compare the filtered array's LENGTH — previously the array itself was
  // compared to 3, which coerces to NaN for non-empty arrays and never matched.
  return !!video.year
      && otherVideos.length > 3
      && otherVideos.filter(other => other.title === video.title && other.year === video.year).length < 3;
}
/**
 * Heuristic: does this torrent contain multiple entries (a pack)?
 * Movies: complete collections, year ranges or a "movies" keyword.
 * Series: multi-season/multi-episode markers or large size, unless the title
 * clearly identifies a single episode.
 */
export function isPackTorrent(torrent) {
  if (torrent.pack) {
    return true;
  }
  const info = parse(torrent.title);
  if (torrent.type === Type.MOVIE) {
    // a string year means a year range, e.g. "1990-1995" — implies a collection
    return info.complete || typeof info.year === 'string' || /movies/i.test(torrent.title);
  }
  const multiEpisode = info.complete ||
      torrent.size > MULTIPLE_FILES_SIZE ||
      (info.seasons && info.seasons.length > 1) ||
      (info.episodes && info.episodes.length > 1) ||
      (info.seasons && !info.episodes);
  const singleEpisode = Number.isInteger(info.episode) || (!info.episodes && info.date);
  return multiEpisode && !singleEpisode;
}

View File

@@ -0,0 +1,55 @@
/**
 * Execute promise-returning factories strictly one after another,
 * concatenating their results into a single flat array.
 */
export async function sequence(promises) {
  let collected = [];
  for (const factory of promises) {
    // concat (not push) so array results are flattened one level, as before
    collected = collected.concat(await factory());
  }
  return collected;
}
/**
 * Resolve with the first promise that fulfills.
 * Rejects with the array of all errors (in input order) only when every
 * promise rejects; an empty input rejects with an empty array.
 */
export async function first(promises) {
  if (promises.length === 0) {
    return Promise.reject([]);
  }
  return new Promise((resolve, reject) => {
    const errors = new Array(promises.length);
    let remaining = promises.length;
    promises.forEach((promise, index) => {
      promise.then(resolve, (error) => {
        errors[index] = error;
        remaining -= 1;
        if (remaining === 0) {
          reject(errors);
        }
      });
    });
  });
}
/**
 * Resolve after the given duration in milliseconds.
 */
export async function delay(duration) {
  await new Promise((done) => {
    setTimeout(done, duration);
  });
}
/**
 * Race a promise against a timer; rejects with `message` after `timeoutMs`.
 * The timer is cleared once the race settles, so it cannot keep the process
 * alive (fix: previously the pending setTimeout leaked for the full duration).
 * Note: rejects with the message value itself (a string by default) to keep
 * the original contract for existing callers.
 */
export async function timeout(timeoutMs, promise, message = 'Timed out') {
  let timer;
  const timerPromise = new Promise((resolve, reject) => {
    timer = setTimeout(() => reject(message), timeoutMs);
  });
  return Promise.race([promise, timerPromise]).finally(() => clearTimeout(timer));
}
/**
 * Return the most common value from the given array (undefined for an empty one).
 * Works on a copy — fix: the previous version sorted in place and silently
 * reordered the caller's array.
 */
export function mostCommonValue(array) {
  return [...array]
      .sort((a, b) => array.filter(v => v === a).length - array.filter(v => v === b).length)
      .pop();
}

View File

@@ -0,0 +1,379 @@
import moment from 'moment';
import * as Promises from './promises.js';
import { Sequelize, Op, DataTypes, fn, col, literal } from 'sequelize';
import { databaseConfig } from './config.js';
// Shared Sequelize instance for the scraper database; SQL logging disabled.
const database = new Sequelize(
  databaseConfig.DATABASE_URI,
  {
    logging: false
  }
);
// Per-provider scrape progress bookkeeping (last scrape time and item id).
const Provider = database.define('provider', {
  name: { type: DataTypes.STRING(32), primaryKey: true },
  lastScraped: { type: DataTypes.DATE },
  lastScrapedId: { type: DataTypes.STRING(128) }
});
// Raw torrent rows written by the ingestion producer; consumers mark them
// processed once converted into Torrent/File entries. Unique per (source, info_hash).
const IngestedTorrent = database.define('ingested_torrent', {
  id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
  name: DataTypes.STRING,
  source: DataTypes.STRING,
  category: DataTypes.STRING,
  info_hash: DataTypes.STRING,
  size: DataTypes.STRING,
  seeders: DataTypes.INTEGER,
  leechers: DataTypes.INTEGER,
  imdb: DataTypes.STRING,
  processed: {
    type: DataTypes.BOOLEAN,
    defaultValue: false
  }},
  {
    indexes: [
      {
        unique: true,
        fields: ['source', 'info_hash']
      }
    ]
  })
/* eslint-disable no-unused-vars */
// Pages already ingested by the scraper — unused here, but defined so that
// database.sync keeps the table schema in step.
const IngestedPage = database.define('ingested_page', {
  id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
  url: { type: DataTypes.STRING, allowNull: false },
},
  {
    indexes: [
      {
        unique: true,
        fields: ['url']
      }
    ]
  })
/* eslint-enable no-unused-vars */
// Canonical torrent entry produced from ingested rows; keyed by info hash.
const Torrent = database.define('torrent',
  {
    infoHash: { type: DataTypes.STRING(64), primaryKey: true },
    provider: { type: DataTypes.STRING(32), allowNull: false },
    torrentId: { type: DataTypes.STRING(512) },
    title: { type: DataTypes.STRING(512), allowNull: false },
    size: { type: DataTypes.BIGINT },
    type: { type: DataTypes.STRING(16), allowNull: false },
    uploadDate: { type: DataTypes.DATE, allowNull: false },
    seeders: { type: DataTypes.SMALLINT },
    trackers: { type: DataTypes.STRING(8000) },
    languages: { type: DataTypes.STRING(4096) },
    resolution: { type: DataTypes.STRING(16) },
    reviewed: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
    opened: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false } // whether contents were fetched
  }
);
// A video file inside a torrent, optionally mapped to an imdb/kitsu episode.
const File = database.define('file',
  {
    id: { type: DataTypes.BIGINT, autoIncrement: true, primaryKey: true },
    infoHash: {
      type: DataTypes.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: { type: DataTypes.INTEGER },
    title: { type: DataTypes.STRING(512), allowNull: false },
    size: { type: DataTypes.BIGINT },
    imdbId: { type: DataTypes.STRING(32) },
    imdbSeason: { type: DataTypes.INTEGER },
    imdbEpisode: { type: DataTypes.INTEGER },
    kitsuId: { type: DataTypes.INTEGER },
    kitsuEpisode: { type: DataTypes.INTEGER }
  },
  {
    indexes: [
      {
        // COALESCE lets nullable columns participate in the uniqueness constraint
        unique: true,
        name: 'files_unique_file_constraint',
        fields: [
          col('infoHash'),
          fn('COALESCE', (col('fileIndex')), -1),
          fn('COALESCE', (col('imdbId')), 'null'),
          fn('COALESCE', (col('imdbSeason')), -1),
          fn('COALESCE', (col('imdbEpisode')), -1),
          fn('COALESCE', (col('kitsuId')), -1),
          fn('COALESCE', (col('kitsuEpisode')), -1)
        ]
      },
      { unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] },
      { unique: false, fields: ['kitsuId', 'kitsuEpisode'] }
    ]
  }
);
// Subtitle file inside a torrent; linked to its File row once assigned.
const Subtitle = database.define('subtitle',
  {
    infoHash: {
      type: DataTypes.STRING(64),
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: {
      type: DataTypes.INTEGER,
      allowNull: false
    },
    fileId: {
      type: DataTypes.BIGINT,
      allowNull: true,
      references: { model: File, key: 'id' },
      onDelete: 'SET NULL'
    },
    title: { type: DataTypes.STRING(512), allowNull: false },
  },
  {
    timestamps: false,
    indexes: [
      {
        // COALESCE lets the nullable fileId participate in the uniqueness constraint
        unique: true,
        name: 'subtitles_unique_subtitle_constraint',
        fields: [
          col('infoHash'),
          col('fileIndex'),
          fn('COALESCE', (col('fileId')), -1)
        ]
      },
      { unique: false, fields: ['fileId'] }
    ]
  }
);
// Raw content listing (every file path/size) of an opened torrent;
// composite key (infoHash, fileIndex).
const Content = database.define('content',
  {
    infoHash: {
      type: DataTypes.STRING(64),
      primaryKey: true,
      allowNull: false,
      references: { model: Torrent, key: 'infoHash' },
      onDelete: 'CASCADE'
    },
    fileIndex: {
      type: DataTypes.INTEGER,
      primaryKey: true,
      allowNull: false
    },
    path: { type: DataTypes.STRING(512), allowNull: false },
    size: { type: DataTypes.BIGINT },
  },
  {
    timestamps: false,
  }
);
// Torrents that should never be (re)processed, keyed by info hash.
const SkipTorrent = database.define('skip_torrent', {
  infoHash: { type: DataTypes.STRING(64), primaryKey: true },
});
// Associations are declared without db-level constraints — referential
// integrity relies on the explicit references/onDelete definitions above.
Torrent.hasMany(File, { foreignKey: 'infoHash', constraints: false });
File.belongsTo(Torrent, { foreignKey: 'infoHash', constraints: false });
Torrent.hasMany(Content, { foreignKey: 'infoHash', constraints: false });
Content.belongsTo(Torrent, { foreignKey: 'infoHash', constraints: false });
File.hasMany(Subtitle, { foreignKey: 'fileId', constraints: false });
Subtitle.belongsTo(File, { foreignKey: 'fileId', constraints: false });
// Synchronize the schema when enabled by config; otherwise a no-op.
// Sync failures are logged and re-thrown so startup can abort.
export function connect() {
  if (!databaseConfig.ENABLE_SYNC) {
    return Promise.resolve();
  }
  return database.sync({ alter: true })
      .catch(error => {
        console.error('Failed syncing database: ', error);
        throw error;
      });
}
// Find or create the provider row by name; on db failure fall back to the
// in-memory provider object so scraping can still proceed.
export function getProvider(provider) {
  return Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider })
      .then(([instance]) => instance)
      .catch(() => provider);
}
// Look a torrent up by infoHash when available, otherwise by (provider, torrentId).
export function getTorrent(torrent) {
  const where = torrent.infoHash
      ? { infoHash: torrent.infoHash }
      : { provider: torrent.provider, torrentId: torrent.torrentId };
  return Torrent.findOne({ where });
}

// Find torrents whose title matches the given regexp for the given type.
export function getTorrentsBasedOnTitle(titleQuery, type) {
  return getTorrentsBasedOnQuery({ title: { [Op.regexp]: `${titleQuery}` }, type });
}

// Generic torrent query by an arbitrary where clause.
export function getTorrentsBasedOnQuery(where) {
  return Torrent.findAll({ where });
}

// Generic file query by an arbitrary where clause.
export function getFilesBasedOnQuery(where) {
  return File.findAll({ where });
}
// All ingested tv/movie rows that have not been converted into entries yet.
export function getUnprocessedIngestedTorrents() {
  const where = {
    processed: false,
    category: {
      [Op.or]: ['tv', 'movies']
    }
  };
  return IngestedTorrent.findAll({ where });
}

// Mark the given ingested rows processed, saving them one after another.
export function setIngestedTorrentsProcessed(ingestedTorrents) {
  const saveTasks = ingestedTorrents.map(ingestedTorrent => () => {
    ingestedTorrent.processed = true;
    return ingestedTorrent.save();
  });
  return Promises.sequence(saveTasks);
}
// Torrents that still have a file with the 300000000-byte size, most seeded first.
// NOTE(review): 300000000 appears to be a placeholder for "size unknown" set at
// ingestion time — confirm against the producer before changing.
export function getTorrentsWithoutSize() {
  return Torrent.findAll({
    where: literal(
        'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'),
    order: [
      ['seeders', 'DESC']
    ]
  });
}
/**
 * Torrents not refreshed within the last 7 days, most seeded first.
 * Fix: uses parameterized Op comparisons instead of string-built SQL literals,
 * which avoided query parameterization entirely.
 */
export function getUpdateSeedersTorrents(limit = 50) {
  const until = moment().subtract(7, 'days').format('YYYY-MM-DD');
  return Torrent.findAll({
    where: { updatedAt: { [Op.lt]: until } },
    limit: limit,
    order: [
      ['seeders', 'DESC'],
      ['updatedAt', 'ASC']
    ]
  });
}

/**
 * Recently created torrents (last 4 days) not refreshed within ~12 hours,
 * least seeded first.
 * NOTE(review): cutoffs are formatted as date-only strings (day granularity),
 * so the "12 hours" intent is coarser in practice — kept for compatibility.
 */
export function getUpdateSeedersNewTorrents(limit = 50) {
  const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD');
  const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD');
  return Torrent.findAll({
    where: {
      updatedAt: { [Op.lt]: lastUpdate },
      createdAt: { [Op.gt]: createdAfter }
    },
    limit: limit,
    order: [
      ['seeders', 'ASC'],
      ['updatedAt', 'ASC']
    ]
  });
}
// A random batch of up to 500 seeded torrents whose contents were never fetched.
export function getNoContentsTorrents() {
  const where = { opened: false, seeders: { [Op.gte]: 1 } };
  const order = [[fn('RANDOM')]];
  return Torrent.findAll({ where, order, limit: 500 });
}
// Upsert the torrent row, then persist its contents and subtitles in order.
export async function createTorrent(torrent) {
  await Torrent.upsert(torrent);
  await createContents(torrent.infoHash, torrent.contents);
  return createSubtitles(torrent.infoHash, torrent.subtitles);
}

// Update the seeders count, locating the row by infoHash or (provider, torrentId).
export function setTorrentSeeders(torrent, seeders) {
  const where = torrent.infoHash
      ? { infoHash: torrent.infoHash }
      : { provider: torrent.provider, torrentId: torrent.torrentId };
  return Torrent.update({ seeders }, { where });
}

// Remove the torrent row (files/contents cascade via their references).
export function deleteTorrent(torrent) {
  return Torrent.destroy({ where: { infoHash: torrent.infoHash } });
}
// Create or update a file row. Rows with an id are saved/upserted and their
// subtitles re-linked; new rows are inserted together with their subtitles.
export function createFile(file) {
  if (file.id) {
    // dataValues present means it's a live Sequelize instance — save it directly
    return (file.dataValues ? file.save() : File.upsert(file))
        .then(() => upsertSubtitles(file, file.subtitles));
  }
  if (file.subtitles && file.subtitles.length) {
    file.subtitles = file.subtitles.map(subtitle => ({ infoHash: file.infoHash, title: subtitle.path, ...subtitle }));
  }
  return File.create(file, { include: [Subtitle], ignoreDuplicates: true });
}
// All file rows belonging to the given torrent.
export function getFiles(torrent) {
  return File.findAll({ where: { infoHash: torrent.infoHash } });
}

// File rows whose title matches the given regexp.
export function getFilesBasedOnTitle(titleQuery) {
  return File.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` } } });
}

// Delete a single file row by id.
export function deleteFile(file) {
  return File.destroy({ where: { id: file.id } });
}

// Bulk-insert subtitles for a torrent; resolves immediately when there are none.
export function createSubtitles(infoHash, subtitles) {
  if (!subtitles || !subtitles.length) {
    return Promise.resolve();
  }
  const rows = subtitles.map(subtitle => ({ infoHash, title: subtitle.path, ...subtitle }));
  return Subtitle.bulkCreate(rows);
}
// Link subtitles to a stored file row, filling in fileId/infoHash/title defaults,
// then save existing instances or create new rows sequentially.
export function upsertSubtitles(file, subtitles) {
  if (file.id && subtitles && subtitles.length) {
    return Promises.sequence(subtitles
        .map(subtitle => {
          // mutates the subtitle objects on purpose so saved instances keep the link
          subtitle.fileId = file.id;
          subtitle.infoHash = subtitle.infoHash || file.infoHash;
          subtitle.title = subtitle.title || subtitle.path;
          return subtitle;
        })
        .map(subtitle => () => subtitle.dataValues ? subtitle.save() : Subtitle.create(subtitle)));
  }
  return Promise.resolve();
}
// All subtitle rows belonging to the given torrent.
export function getSubtitles(torrent) {
  return Subtitle.findAll({ where: { infoHash: torrent.infoHash } });
}

// Subtitle rows not yet linked to a file.
export function getUnassignedSubtitles() {
  return Subtitle.findAll({ where: { fileId: null } });
}

// Bulk-insert torrent contents and mark the torrent as opened;
// resolves immediately when there are no contents.
export function createContents(infoHash, contents) {
  if (!contents || !contents.length) {
    return Promise.resolve();
  }
  const rows = contents.map(content => ({ infoHash, ...content }));
  return Content.bulkCreate(rows, { ignoreDuplicates: true })
      .then(() => Torrent.update({ opened: true }, { where: { infoHash }, silent: true }));
}

// All content rows belonging to the given torrent.
export function getContents(torrent) {
  return Content.findAll({ where: { infoHash: torrent.infoHash } });
}
// Look up a skip-list entry; rejects with an Error when the hash is not listed.
export function getSkipTorrent(torrent) {
  return SkipTorrent.findByPk(torrent.infoHash)
      .then((result) => {
        if (result) {
          return result.dataValues;
        }
        throw new Error(`torrent not found: ${torrent.infoHash}`);
      });
}

// Add (or keep) a torrent on the skip list.
export function createSkipTorrent(torrent) {
  return SkipTorrent.upsert({ infoHash: torrent.infoHash });
}

View File

@@ -0,0 +1,82 @@
import torrentStream from 'torrent-stream';
import { decode, encode } from 'magnet-uri';
import { isSubtitle, isVideo } from './extension.js';
import { torrentConfig } from './config.js';
/**
 * Fetch a torrent's file listing and split it into raw contents,
 * video files and subtitle files.
 */
export async function torrentFiles(torrent, timeout) {
  const files = await filesFromTorrentStream(torrent, timeout);
  return {
    contents: files,
    videos: filterVideos(files),
    subtitles: filterSubtitles(files)
  };
}
// Convenience wrapper returning only the file list of a torrent.
async function filesFromTorrentStream(torrent, timeout) {
  const { files } = await filesAndSizeFromTorrentStream(torrent, timeout);
  return files;
}
// torrent-stream engine options: metadata-only usage — no uploads, no piece
// verification, no DHT, tracker announces only, capped connections per torrent.
const engineOptions = {
  connections: torrentConfig.MAX_CONNECTIONS_PER_TORRENT,
  uploads: 0,
  verify: false,
  dht: false,
  tracker: true
}
/**
 * Open a torrent-stream engine for the torrent and resolve with its file list
 * and total size; rejects when no peers respond within `timeout` ms.
 * Fix: builds the magnet link with magnet-uri's `encode` — the previous
 * `decode.encode(...)` was undefined (the decode function has no `.encode`
 * property) and threw on every call.
 */
function filesAndSizeFromTorrentStream(torrent, timeout = 30000) {
  if (!torrent.infoHash) {
    return Promise.reject(new Error("no infoHash..."));
  }
  const magnet = encode({ infoHash: torrent.infoHash, announce: torrent.trackers });
  return new Promise((resolve, reject) => {
    const timeoutId = setTimeout(() => {
      engine.destroy();
      reject(new Error('No available connections for torrent!'));
    }, timeout);
    const engine = new torrentStream(magnet, engineOptions);
    engine.ready(() => {
      // cancel the timeout first so it can never fire after a successful resolve
      clearTimeout(timeoutId);
      const files = engine.files
          .map((file, fileIndex) => ({
            fileIndex,
            name: file.name,
            path: file.path.replace(/^[^/]+\//, ''), // drop the root folder from paths
            size: file.length
          }));
      const size = engine.torrent.length;
      resolve({ files, size });
      engine.destroy();
    });
  });
}
/**
 * Keep only real video files, dropping samples, extras, anime openings/endings,
 * watermark stubs and redundant small copies, judged by size ratio against the
 * largest video in the torrent.
 */
function filterVideos(files) {
  if (files.length === 1 && !Number.isInteger(files[0].fileIndex)) {
    // synthetic single entry (no real file index) — nothing to filter
    return files;
  }
  const videos = files.filter(file => isVideo(file.path));
  const maxSize = Math.max(...videos.map(video => video.size));
  const minSampleRatio = videos.length <= 3 ? 3 : 10;
  const minAnimeExtraRatio = 5;
  const minRedundantRatio = videos.length <= 3 ? 30 : Number.MAX_VALUE;
  const sizeRatio = video => maxSize / parseInt(video.size);
  const isSample = video => video.path.match(/sample|bonus|promo/i) && sizeRatio(video) > minSampleRatio;
  const isRedundant = video => sizeRatio(video) > minRedundantRatio;
  const isExtra = video => video.path.match(/extras?\//i);
  const isAnimeExtra = video => video.path.match(/(?:\b|_)(?:NC)?(?:ED|OP|PV)(?:v?\d\d?)?(?:\b|_)/i)
      && sizeRatio(video) > minAnimeExtraRatio;
  const isWatermark = video => video.path.match(/^[A-Z-]+(?:\.[A-Z]+)?\.\w{3,4}$/)
      && sizeRatio(video) > minAnimeExtraRatio;
  return videos.filter(video =>
      !isSample(video) && !isExtra(video) && !isAnimeExtra(video) && !isRedundant(video) && !isWatermark(video));
}
// Keep only subtitle files from a torrent's file listing.
function filterSubtitles(files) {
  const subtitles = [];
  for (const file of files) {
    if (isSubtitle(file.path)) {
      subtitles.push(file);
    }
  }
  return subtitles;
}

View File

@@ -0,0 +1,172 @@
import { parse } from 'parse-torrent-title';
import { Type } from './types.js';
import * as Promises from './promises.js';
import * as repository from './repository.js';
import { getImdbId, getKitsuId } from './metadata.js';
import { parseTorrentFiles } from './torrentFiles.js';
import { assignSubtitles } from './torrentSubtitles.js';
import { isPackTorrent } from './parseHelper.js';
// Create a full torrent entry: resolve imdb/kitsu ids from the title, parse the
// contained files, optionally remap them onto existing db rows, and persist the
// torrent together with its file entries.
export async function createTorrentEntry(torrent, overwrite = false) {
  const titleInfo = parse(torrent.title);
  if (!torrent.imdbId && torrent.type !== Type.ANIME) {
    torrent.imdbId = await getImdbId(titleInfo, torrent.type)
        .catch(() => undefined);
  }
  if (torrent.imdbId && torrent.imdbId.length < 9) {
    // pad zeros to imdbId if missing
    torrent.imdbId = 'tt' + torrent.imdbId.replace('tt', '').padStart(7, '0');
  }
  if (torrent.imdbId && torrent.imdbId.length > 9 && torrent.imdbId.startsWith('tt0')) {
    // sanitize imdbId from redundant zeros
    torrent.imdbId = torrent.imdbId.replace(/tt0+([0-9]{7,})$/, 'tt$1');
  }
  if (!torrent.kitsuId && torrent.type === Type.ANIME) {
    torrent.kitsuId = await getKitsuId(titleInfo)
        .catch(() => undefined);
  }
  if (!torrent.imdbId && !torrent.kitsuId && !isPackTorrent(torrent)) {
    console.log(`imdbId or kitsuId not found: ${torrent.provider} ${torrent.title}`);
    return;
  }
  // file-parsing failures are swallowed deliberately — the entry is skipped below
  const { contents, videos, subtitles } = await parseTorrentFiles(torrent)
      .then(torrentContents => overwrite ? overwriteExistingFiles(torrent, torrentContents) : torrentContents)
      .then(torrentContents => assignSubtitles(torrentContents))
      .catch(error => {
        console.log(`Failed getting files for ${torrent.title}`, error.message);
        return {};
      });
  if (!videos || !videos.length) {
    console.log(`no video files found for ${torrent.provider} [${torrent.infoHash}] ${torrent.title}`);
    return;
  }
  return repository.createTorrent({ ...torrent, contents, subtitles })
      .then(() => Promises.sequence(videos.map(video => () => repository.createFile(video))))
      .then(() => console.log(`Created ${torrent.provider} entry for [${torrent.infoHash}] ${torrent.title}`));
}
// Map freshly-parsed videos onto already-stored file rows (grouped by fileIndex)
// so existing db ids are reused instead of duplicated. Rejects when the parsed
// torrent contains no videos at all.
async function overwriteExistingFiles(torrent, torrentContents) {
  const videos = torrentContents && torrentContents.videos;
  if (videos && videos.length) {
    const existingFiles = await repository.getFiles({ infoHash: videos[0].infoHash })
        .then((existing) => existing
            .reduce((map, next) => {
              const fileIndex = next.fileIndex !== undefined ? next.fileIndex : null;
              map[fileIndex] = (map[fileIndex] || []).concat(next);
              return map;
            }, {}))
        .catch(() => undefined);
    if (existingFiles && Object.keys(existingFiles).length) {
      const overwrittenVideos = videos
          .map(file => {
            // single-video/single-row torrents are matched directly regardless of index
            const mapping = videos.length === 1 && Object.keys(existingFiles).length === 1
                ? Object.values(existingFiles)[0]
                : existingFiles[file.fileIndex !== undefined ? file.fileIndex : null];
            if (mapping) {
              // shift() consumes the stored row so each db id is assigned at most once
              const originalFile = mapping.shift();
              return { id: originalFile.id, ...file };
            }
            return file;
          });
      return { ...torrentContents, videos: overwrittenVideos };
    }
    return torrentContents;
  }
  // NOTE(review): rejects with a plain string, not an Error — kept for compatibility
  return Promise.reject(`No video files found for: ${torrent.title}`);
}
// Persist a torrent on the skip list so it is never reprocessed.
export async function createSkipTorrentEntry(torrent) {
  return repository.createSkipTorrent(torrent);
}

// Look a torrent up in the skip list first, then among stored torrents;
// resolves with undefined when it is unknown on both.
export async function getStoredTorrentEntry(torrent) {
  try {
    return await repository.getSkipTorrent(torrent);
  } catch {
    try {
      return await repository.getTorrent(torrent);
    } catch {
      return undefined;
    }
  }
}
// Check whether a torrent is already stored and, if so, refresh it: migrate
// provider attribution, backfill languages, (re)create its contents and update
// seeders. Returns false when not stored; true for stored RARBG entries (skipped).
export async function checkAndUpdateTorrent(torrent) {
  const storedTorrent = torrent.dataValues
      ? torrent
      : await repository.getTorrent(torrent).catch(() => undefined);
  if (!storedTorrent) {
    return false;
  }
  if (storedTorrent.provider === 'RARBG') {
    return true;
  }
  // re-attribute legacy KickassTorrents entries to the currently scraped provider
  if (storedTorrent.provider === 'KickassTorrents' && torrent.provider) {
    storedTorrent.provider = torrent.provider;
    storedTorrent.torrentId = torrent.torrentId;
  }
  if (!storedTorrent.languages && torrent.languages && storedTorrent.provider !== 'RARBG') {
    storedTorrent.languages = torrent.languages;
    await storedTorrent.save();
    console.log(`Updated [${storedTorrent.infoHash}] ${storedTorrent.title} language to ${torrent.languages}`);
  }
  return createTorrentContents({ ...storedTorrent.get(), torrentLink: torrent.torrentLink })
      .then(() => updateTorrentSeeders(torrent));
}
// (Re)open a stored torrent: fetch its real file listing, merge it with the
// stored file rows and persist contents/subtitles. No-op when the torrent is
// already opened or nothing is stored for it.
export async function createTorrentContents(torrent) {
  if (torrent.opened) {
    return;
  }
  const storedVideos = await repository.getFiles(torrent).catch(() => []);
  if (!storedVideos || !storedVideos.length) {
    return;
  }
  // a single stored file without a fileIndex means contents were never fetched
  const notOpenedVideo = storedVideos.length === 1 && !Number.isInteger(storedVideos[0].fileIndex);
  const imdbId = Promises.mostCommonValue(storedVideos.map(stored => stored.imdbId));
  const kitsuId = Promises.mostCommonValue(storedVideos.map(stored => stored.kitsuId));
  const { contents, videos, subtitles } = await parseTorrentFiles({ ...torrent, imdbId, kitsuId })
      .then(torrentContents => notOpenedVideo ? torrentContents : { ...torrentContents, videos: storedVideos })
      .then(torrentContents => assignSubtitles(torrentContents))
      .catch(error => {
        console.log(`Failed getting contents for [${torrent.infoHash}] ${torrent.title}`, error.message);
        return {};
      });
  if (!contents || !contents.length) {
    return;
  }
  if (notOpenedVideo && videos.length === 1) {
    // if both have a single video and stored one was not opened, update stored one to true metadata and use that
    storedVideos[0].fileIndex = videos[0].fileIndex;
    storedVideos[0].title = videos[0].title;
    storedVideos[0].size = videos[0].size;
    storedVideos[0].subtitles = videos[0].subtitles;
    videos[0] = storedVideos[0];
  }
  // no videos available or more than one new videos were in the torrent
  const shouldDeleteOld = notOpenedVideo && videos.every(video => !video.id);
  return repository.createTorrent({ ...torrent, contents, subtitles })
      .then(() => {
        if (shouldDeleteOld) {
          console.error(`Deleting old video for [${torrent.infoHash}] ${torrent.title}`)
          return storedVideos[0].destroy();
        }
        return Promise.resolve();
      })
      .then(() => Promises.sequence(videos.map(video => () => repository.createFile(video))))
      .then(() => console.log(`Created contents for ${torrent.provider} [${torrent.infoHash}] ${torrent.title}`))
      .catch(error => console.error(`Failed saving contents for [${torrent.infoHash}] ${torrent.title}`, error));
}
// Persist a torrent's seeders count when the torrent is identifiable and the
// count is a valid integer; resolves undefined when the db update fails.
export async function updateTorrentSeeders(torrent) {
  const identifiable = torrent.infoHash || (torrent.provider && torrent.torrentId);
  if (!identifiable || !Number.isInteger(torrent.seeders)) {
    return torrent;
  }
  try {
    return await repository.setTorrentSeeders(torrent, torrent.seeders);
  } catch (error) {
    console.warn('Failed updating seeders:', error);
    return undefined;
  }
}

View File

@@ -0,0 +1,512 @@
import moment from 'moment';
import Bottleneck from 'bottleneck';
import distance from 'jaro-winkler';
import { parse } from 'parse-torrent-title';
import * as Promises from './promises.js';
import { getMetadata, getImdbId, getKitsuId } from './metadata.js';
import { parseSeriesVideos, isPackTorrent } from './parseHelper.js';
import { Type } from './types.js';
import { isDisk } from './extension.js';
import {torrentFiles} from "./torrent.js";
import { metadataConfig } from './config.js';
const MIN_SIZE = 5 * 1024 * 1024; // 5 MB — smaller videos are treated as junk and filtered out
// Throttle IMDb lookups to the configured concurrency and minimum interval.
const imdb_limiter = new Bottleneck({ maxConcurrent: metadataConfig.IMDB_CONCURRENT, minTime: metadataConfig.IMDB_INTERVAL_MS });
// Entry point: resolve metadata for a torrent, correct its type when metadata
// disagrees, and dispatch to movie or series file parsing.
export async function parseTorrentFiles(torrent) {
  const parsedTorrentName = parse(torrent.title);
  const metadata = await getMetadata(torrent.kitsuId || torrent.imdbId, torrent.type || Type.MOVIE)
      .then(meta => Object.assign({}, meta))
      .catch(() => undefined);
  // if (metadata && metadata.type !== torrent.type && torrent.type !== Type.ANIME) {
  //   throw new Error(`Mismatching entry type for ${torrent.name}: ${torrent.type}!=${metadata.type}`);
  // }
  if (torrent.type !== Type.ANIME && metadata && metadata.type && metadata.type !== torrent.type) {
    // it's actually a movie/series
    torrent.type = metadata.type;
  }
  // NOTE(review): the `season === 5 && episode in [1, 5]` clause looks like an
  // inherited special-case workaround — confirm its intent before changing it
  if (torrent.type === Type.MOVIE && (!parsedTorrentName.seasons ||
      parsedTorrentName.season === 5 && [1, 5].includes(parsedTorrentName.episode))) {
    return parseMovieFiles(torrent, parsedTorrentName, metadata);
  }
  return parseSeriesFiles(torrent, parsedTorrentName, metadata)
}
// Build file entries for a movie torrent: a single movie maps directly to the
// torrent's ids, while a multi-movie pack resolves an imdbId per contained video.
async function parseMovieFiles(torrent, parsedName, metadata) {
  const { contents, videos, subtitles } = await getMoviesTorrentContent(torrent);
  const filteredVideos = videos
      .filter(video => video.size > MIN_SIZE)
      .filter(video => !isFeaturette(video));
  if (isSingleMovie(filteredVideos)) {
    const parsedVideos = filteredVideos.map(video => ({
      infoHash: torrent.infoHash,
      fileIndex: video.fileIndex,
      title: video.path || torrent.title,
      size: video.size || torrent.size,
      imdbId: torrent.imdbId || metadata && metadata.imdbId,
      kitsuId: torrent.kitsuId || metadata && metadata.kitsuId
    }));
    return { contents, videos: parsedVideos, subtitles };
  }
  // movie pack: look up each video's imdb id sequentially (featurettes keep none)
  const parsedVideos = await Promises.sequence(filteredVideos.map(video => () => isFeaturette(video)
          ? Promise.resolve(video)
          : findMovieImdbId(video.name).then(imdbId => ({ ...video, imdbId }))))
      .then(videos => videos.map(video => ({
        infoHash: torrent.infoHash,
        fileIndex: video.fileIndex,
        title: video.path || video.name,
        size: video.size,
        imdbId: video.imdbId,
      })));
  return { contents, videos: parsedVideos, subtitles };
}
// Build file entries for a series torrent: filter junk files, parse per-video
// season/episode info, decompose absolute numbering and map entries onto
// imdb/kitsu episodes; bundled movies are resolved separately.
async function parseSeriesFiles(torrent, parsedName, metadata) {
  const { contents, videos, subtitles } = await getSeriesTorrentContent(torrent);
  const parsedVideos = await Promise.resolve(videos)
      .then(videos => videos.filter(video => videos.length === 1 || video.size > MIN_SIZE))
      .then(videos => parseSeriesVideos(torrent, videos))
      .then(videos => decomposeEpisodes(torrent, videos, metadata))
      .then(videos => assignKitsuOrImdbEpisodes(torrent, videos, metadata))
      .then(videos => Promise.all(videos.map(video => video.isMovie
          ? mapSeriesMovie(video, torrent)
          : mapSeriesEpisode(video, torrent, videos))))
      .then(videos => videos
          .reduce((a, b) => a.concat(b), [])
          .map(video => isFeaturette(video) ? clearInfoFields(video) : video))
  return { contents, videos: parsedVideos, subtitles };
}
// Fetch a movie torrent's files. When the listing cannot be fetched and the
// torrent is not a pack, fall back to a single synthetic video entry; disk
// image torrents without playable videos get the same synthetic entry.
async function getMoviesTorrentContent(torrent) {
  let files;
  try {
    files = await torrentFiles(torrent);
  } catch (error) {
    if (isPackTorrent(torrent)) {
      throw error;
    }
    files = { videos: [{ name: torrent.title, path: torrent.title, size: torrent.size }] };
  }
  if (files.contents && files.contents.length && !files.videos.length && isDiskTorrent(files.contents)) {
    files.videos = [{ name: torrent.title, path: torrent.title, size: torrent.size }];
  }
  return files;
}
// Fetch a series torrent's files; non-pack torrents fall back to a single
// synthetic video entry when the listing cannot be fetched.
async function getSeriesTorrentContent(torrent) {
  try {
    return await torrentFiles(torrent);
  } catch (error) {
    if (isPackTorrent(torrent)) {
      throw error;
    }
    return { videos: [{ name: torrent.title, path: torrent.title, size: torrent.size }] };
  }
}
// Maps a parsed series file to one stream entry per episode.
// Files without any episode info are still emitted as a single entry when the
// torrent plausibly contains episodes (single file, sibling files have episodes,
// or the torrent title itself carries a season) — otherwise they are dropped.
async function mapSeriesEpisode(file, torrent, files) {
  if (!file.episodes && !file.kitsuEpisodes) {
    if (files.length === 1 || files.some(f => f.episodes || f.kitsuEpisodes) || parse(torrent.title).seasons) {
      return Promise.resolve({
        infoHash: torrent.infoHash,
        fileIndex: file.fileIndex,
        title: file.path || file.name,
        size: file.size,
        imdbId: torrent.imdbId || file.imdbId,
      });
    }
    return Promise.resolve([]); // not identifiable as an episode — drop
  }
  // multi-episode files produce one entry per contained episode
  const episodeIndexes = [...(file.episodes || file.kitsuEpisodes).keys()];
  return Promise.resolve(episodeIndexes.map((index) => ({
    infoHash: torrent.infoHash,
    fileIndex: file.fileIndex,
    title: file.path || file.name,
    size: file.size,
    imdbId: file.imdbId || torrent.imdbId,
    imdbSeason: file.season,
    imdbEpisode: file.episodes && file.episodes[index],
    kitsuId: file.kitsuId || torrent.kitsuId,
    kitsuEpisode: file.kitsuEpisodes && file.kitsuEpisodes[index]
  })))
}
// Maps a movie file found inside a series torrent to a single stream entry.
// Resolves a kitsu id for anime torrents, otherwise an imdb id, then enriches
// with episode info when the movie's metadata actually describes episodes
// (ex. specials exposed as a one-episode entry).
async function mapSeriesMovie(file, torrent) {
  const kitsuId = torrent.type === Type.ANIME ? await findMovieKitsuId(file) : undefined;
  const imdbId = !kitsuId ? await findMovieImdbId(file) : undefined;
  // best-effort metadata fetch; an empty object keeps the entry emitable
  const metadata = await getMetadata(kitsuId || imdbId, Type.MOVIE).catch(() => ({}));
  const hasEpisode = metadata.videos && metadata.videos.length && (file.episode || metadata.videos.length === 1);
  const episodeVideo = hasEpisode && metadata.videos[(file.episode || 1) - 1];
  return [{
    infoHash: torrent.infoHash,
    fileIndex: file.fileIndex,
    title: file.path || file.name,
    size: file.size,
    imdbId: metadata.imdbId || imdbId,
    kitsuId: metadata.kitsuId || kitsuId,
    // season/episode only when the id system matches the metadata source
    imdbSeason: episodeVideo && metadata.imdbId ? episodeVideo.imdbSeason : undefined,
    imdbEpisode: episodeVideo && metadata.imdbId ? episodeVideo.imdbEpisode || episodeVideo.episode : undefined,
    kitsuEpisode: episodeVideo && metadata.kitsuId ? episodeVideo.kitsuEpisode || episodeVideo.episode : undefined
  }];
}
// Resolves raw parsed episode numbers into proper season/episode pairs.
// Handles anime absolute numbering, concatenated numbering (511 -> S05E11),
// date-based releases, and absolute numbering across seasons.
// Mutates the file entries in place and returns the same array.
async function decomposeEpisodes(torrent, files, metadata = { episodeCount: [] }) {
  if (files.every(file => !file.episodes && !file.date)) {
    // nothing to decompose
    return files;
  }
  preprocessEpisodes(files);
  if (torrent.type === Type.ANIME && torrent.kitsuId) {
    if (needsCinemetaMetadataForAnime(files, metadata)) {
      // In some cases anime could be resolved to wrong kitsuId
      // because of imdb season naming/absolute per series naming/multiple seasons
      // So in these cases we need to fetch cinemeta based metadata and decompose episodes using that
      await updateToCinemetaMetadata(metadata);
      if (files.some(file => Number.isInteger(file.season))) {
        // sometimes multi season anime torrents don't include season 1 naming
        files
            .filter(file => !Number.isInteger(file.season) && file.episodes)
            .forEach(file => file.season = 1);
      }
    } else {
      // otherwise for anime type episodes are always absolute and for a single season
      files
          .filter(file => file.episodes && file.season !== 0)
          .forEach(file => file.season = 1);
      return files;
    }
  }
  // all episode numbers across files, ascending — used by the concat heuristic
  const sortedEpisodes = files
      .map(file => !file.isMovie && file.episodes || [])
      .reduce((a, b) => a.concat(b), [])
      .sort((a, b) => a - b);
  if (isConcatSeasonAndEpisodeFiles(files, sortedEpisodes, metadata)) {
    decomposeConcatSeasonAndEpisodeFiles(torrent, files, metadata);
  } else if (isDateEpisodeFiles(files, metadata)) {
    decomposeDateEpisodeFiles(torrent, files, metadata);
  } else if (isAbsoluteEpisodeFiles(torrent, files, metadata)) {
    decomposeAbsoluteEpisodeFiles(torrent, files, metadata);
  }
  // decomposeEpisodeTitleFiles(torrent, files, metadata);
  return files;
}
// Specials are sometimes labeled as episode 0 of a season (ie. S02E00);
// flip them so the season number becomes the episode and season becomes 0.
// Mutates the file entries in place.
function preprocessEpisodes(files) {
  for (const file of files) {
    if (!Number.isInteger(file.season) || file.episode !== 0) {
      continue;
    }
    file.episode = file.season;
    file.episodes = [file.season];
    file.season = 0;
  }
}
// Heuristic: do the files use concatenated season+episode numbering (101 = S01E01)?
// Returns true when most episode numbers look concatenated, or when a smaller
// share of files exceed the series' total episode count entirely.
function isConcatSeasonAndEpisodeFiles(files, sortedEpisodes, metadata) {
  if (metadata.kitsuId !== undefined) {
    // anime does not use this naming scheme in 99% of cases;
    return false;
  }
  // decompose concat season and episode files (ex. 101=S01E01) in case:
  // 1. file has a season, but individual files are concatenated with that season (ex. path Season 5/511 - Prize
  // Fighters.avi)
  // 2. file does not have a season and the episode does not go out of range for the concat season
  // episode count
  const thresholdAbove = Math.max(Math.ceil(files.length * 0.05), 5);  // few files above total count suffice
  const thresholdSorted = Math.max(Math.ceil(files.length * 0.8), 8);  // most episode numbers must fit the pattern
  const threshold = Math.max(Math.ceil(files.length * 0.8), 5);        // most files must be concat-compatible
  // episode numbers > 100 whose implied season/episode fit the known episode counts
  const sortedConcatEpisodes = sortedEpisodes
      .filter(ep => ep > 100)
      .filter(ep => metadata.episodeCount[div100(ep) - 1] < ep)
      .filter(ep => metadata.episodeCount[div100(ep) - 1] >= mod100(ep));
  // files whose episodes are all consistent with their (implied) season
  const concatFileEpisodes = files
      .filter(file => !file.isMovie && file.episodes)
      .filter(file => !file.season || file.episodes.every(ep => div100(ep) === file.season));
  // files whose episode numbers exceed the series total — strong concat signal
  const concatAboveTotalEpisodeCount = files
      .filter(file => !file.isMovie && file.episodes && file.episodes.every(ep => ep > 100))
      .filter(file => file.episodes.every(ep => ep > metadata.totalCount));
  return sortedConcatEpisodes.length >= thresholdSorted && concatFileEpisodes.length >= threshold
      || concatAboveTotalEpisodeCount.length >= thresholdAbove;
}
// Heuristic: true when every file is date-tagged and none has a season that the
// metadata knows about — the series is most likely identified by air date.
function isDateEpisodeFiles(files, metadata) {
  return files.every(file => {
    const knownSeason = file.season && metadata.episodeCount[file.season - 1];
    return !knownSeason && file.date;
  });
}
// Heuristic: do the files use absolute episode numbering (episode numbers keep
// counting across seasons)? True when no file carries a season, when an anime's
// seasons exceed the known season count, or when at least ~20% of the files have
// episode numbers beyond their season's episode count.
function isAbsoluteEpisodeFiles(torrent, files, metadata) {
  const requiredMatches = Math.ceil(files.length / 5);
  const animeTorrent = torrent.type === Type.ANIME && torrent.kitsuId;
  const episodeFiles = files.filter(file => !file.isMovie && file.episodes);
  if (episodeFiles.every(file => !file.season)) {
    return true;
  }
  if (animeTorrent && episodeFiles.every(file => file.season > metadata.episodeCount.length)) {
    return true;
  }
  const outOfSeasonRange = files
      .filter(file => file.season && file.episodes)
      .filter(file => file.episodes.every(ep => metadata.episodeCount[file.season - 1] < ep));
  return outOfSeasonRange.length >= requiredMatches;
}
// Detects an episode that aired so recently that cinemeta hasn't indexed it yet:
// the show is still running, the file's season is at/after the last known season,
// and its episode numbers exceed that season's known episode count.
// Never applies to anime metas or movie files.
function isNewEpisodeNotInMetadata(torrent, file, metadata) {
  const animeTorrent = torrent.type === Type.ANIME && torrent.kitsuId;
  const lastKnownCount = metadata.episodeCount[file.season - 1] || 0;
  return !animeTorrent
      && !file.isMovie
      && file.episodes
      && /continuing|current/i.test(metadata.status)
      && file.season >= metadata.episodeCount.length
      && file.episodes.every(ep => ep > lastKnownCount)
      && file.season !== 1;
}
// Splits concatenated season+episode numbers into proper pairs (511 -> S05E11).
// Only touches files whose season is unknown or agrees with the concatenated
// prefix, and whose (implied) season has fewer than 100 known episodes.
// Mutates the file entries in place.
function decomposeConcatSeasonAndEpisodeFiles(torrent, files, metadata) {
  const candidates = files
      .filter(file => file.episodes && file.season !== 0 && file.episodes.every(ep => ep > 100))
      .filter(file => metadata.episodeCount[(file.season || div100(file.episodes[0])) - 1] < 100)
      .filter(file => !file.season || file.episodes.every(ep => div100(ep) === file.season));
  for (const file of candidates) {
    file.season = div100(file.episodes[0]);
    file.episodes = file.episodes.map(ep => mod100(ep));
  }
}
// Converts absolute episode numbers into season-relative ones using the
// cumulative per-season episode counts from metadata. Without any episode
// counts, everything is simply assigned to season 1. Mutates files in place.
function decomposeAbsoluteEpisodeFiles(torrent, files, metadata) {
  if (metadata.episodeCount.length === 0) {
    files
        .filter(file => !Number.isInteger(file.season) && file.episodes && !file.isMovie)
        .forEach(file => {
          file.season = 1;
        });
    return;
  }
  files
      .filter(file => file.episodes && !file.isMovie && file.season !== 0)
      // leave freshly aired episodes alone — metadata doesn't know them yet
      .filter(file => !isNewEpisodeNotInMetadata(torrent, file, metadata))
      // only remap when the episode number exceeds the file's season episode count
      .filter(file => !file.season || (metadata.episodeCount[file.season - 1] || 0) < file.episodes[0])
      .forEach(file => {
        // first season whose cumulative episode count reaches the absolute number;
        // falls back to the last known season when none does
        const seasonIdx = ([...metadata.episodeCount.keys()]
            .find((i) => metadata.episodeCount.slice(0, i + 1).reduce((a, b) => a + b) >= file.episodes[0])
            + 1 || metadata.episodeCount.length) - 1;
        file.season = seasonIdx + 1;
        // subtract all episodes of the preceding seasons
        file.episodes = file.episodes
            .map(ep => ep - metadata.episodeCount.slice(0, seasonIdx).reduce((a, b) => a + b, 0))
      });
}
// Resolves date-tagged files (daily shows) to season/episode by matching the
// file's date against the metadata videos' release dates, adjusted to the
// show's local timezone. Mutates the file entries in place.
function decomposeDateEpisodeFiles(torrent, files, metadata) {
  if (!metadata || !metadata.videos || !metadata.videos.length) {
    return;
  }
  const timeZoneOffset = getTimeZoneOffset(metadata.country);
  // index metadata videos by their local release date (YYYY-MM-DD)
  const videosByDate = {};
  for (const video of metadata.videos) {
    const releaseDate = moment(video.released).utcOffset(timeZoneOffset).format('YYYY-MM-DD');
    videosByDate[releaseDate] = video;
  }
  for (const file of files) {
    if (!file.date) {
      continue;
    }
    const matchedVideo = videosByDate[file.date];
    if (matchedVideo) {
      file.season = matchedVideo.season;
      file.episodes = [matchedVideo.episode];
    }
  }
}
/* eslint-disable no-unused-vars */
// Fallback matcher: resolves an episode by fuzzy-matching the episode-title part
// of the filename against the metadata video names (jaro-winkler similarity,
// best match wins). Mutates the file entries in place.
// Currently not called (see decomposeEpisodes); kept for reference.
function decomposeEpisodeTitleFiles(torrent, files, metadata) {
  files
      // .filter(file => !file.season)
      .forEach(file => {  // fix: was .map() used purely for side effects
        const episodeTitle = file.name.replace(/_/g, ' ')  // fix: replace ALL underscores, not just the first
            .replace(/^.*(?:E\d+[abc]?|- )\s?(.+)\.\w{1,4}$/, '$1')
            .trim();
        const foundEpisode = metadata.videos
            .map(video => ({ ...video, distance: distance(episodeTitle, video.name) }))
            .sort((a, b) => b.distance - a.distance)[0];
        if (foundEpisode) {
          file.isMovie = false;
          file.season = foundEpisode.season;
          file.episodes = [foundEpisode.episode];
        }
      })
}
/* eslint-enable no-unused-vars */
// Returns the UTC offset used to localize release dates for a given country.
// US shows publish their air dates in Pacific time; everything else defaults to UTC.
function getTimeZoneOffset(country) {
  const knownOffsets = new Map([
    ['United States', '-08:00'],
    ['USA', '-08:00'],
  ]);
  return knownOffsets.get(country) ?? '00:00';
}
// Maps parsed season/episode pairs to kitsu and/or imdb episode identifiers,
// using the kitsu<->imdb season mapping embedded in the metadata videos.
// Mutates the file entries in place and returns the same array.
function assignKitsuOrImdbEpisodes(torrent, files, metadata) {
  if (!metadata || !metadata.videos || !metadata.videos.length) {
    if (torrent.type === Type.ANIME) {
      // assign episodes as kitsu episodes for anime when no metadata available for imdb mapping
      files
          .filter(file => file.season && file.episodes)
          .forEach(file => {
            file.kitsuEpisodes = file.episodes;
            file.season = undefined;
            file.episodes = undefined;
          })
      // fix: guard against null/undefined metadata before reading its fields
      if (metadata && metadata.type === Type.MOVIE && files.every(file => !file.imdbId)) {
        // sometimes a movie has episode naming, thus not recognized as a movie and imdbId not assigned
        files.forEach(file => file.imdbId = metadata.imdbId);
      }
    }
    return files;
  }
  // season -> episode -> video lookup built from the metadata videos
  const seriesMapping = metadata.videos
      .reduce((map, video) => {
        const episodeMap = map[video.season] || {};
        episodeMap[video.episode] = video;
        map[video.season] = episodeMap;
        return map;
      }, {});
  if (metadata.videos.some(video => Number.isInteger(video.imdbSeason)) || !metadata.imdbId) {
    // kitsu episode info is the base
    files
        .filter(file => Number.isInteger(file.season) && file.episodes)
        .forEach(file => {  // fix: was .map() used purely for side effects
          const seasonMapping = seriesMapping[file.season];
          const episodeMapping = seasonMapping && seasonMapping[file.episodes[0]];
          file.kitsuEpisodes = file.episodes;
          if (episodeMapping && Number.isInteger(episodeMapping.imdbSeason)) {
            file.imdbId = metadata.imdbId;
            file.season = episodeMapping.imdbSeason;
            file.episodes = file.episodes.map(ep => seasonMapping[ep] && seasonMapping[ep].imdbEpisode);
          } else {
            // no imdb mapping available for episode
            file.season = undefined;
            file.episodes = undefined;
          }
        });
  } else if (metadata.videos.some(video => video.kitsuEpisode)) {
    // imdb episode info is base
    files
        .filter(file => Number.isInteger(file.season) && file.episodes)
        .forEach(file => {
          if (seriesMapping[file.season]) {
            const seasonMapping = seriesMapping[file.season];
            file.imdbId = metadata.imdbId;
            file.kitsuId = seasonMapping[file.episodes[0]] && seasonMapping[file.episodes[0]].kitsuId;
            file.kitsuEpisodes = file.episodes.map(ep => seasonMapping[ep] && seasonMapping[ep].kitsuEpisode);
          } else if (seriesMapping[file.season - 1]) {
            // sometimes a second season might be a continuation of the previous season
            const seasonMapping = seriesMapping[file.season - 1];
            const episodes = Object.values(seasonMapping);
            const firstKitsuId = episodes.length && episodes[0].kitsuId;
            const differentTitlesCount = new Set(episodes.map(ep => ep.kitsuId)).size
            const skippedCount = episodes.filter(ep => ep.kitsuId === firstKitsuId).length;
            const seasonEpisodes = files
                .filter(otherFile => otherFile.season === file.season)
                .reduce((a, b) => a.concat(b.episodes), []);
            const isAbsoluteOrder = seasonEpisodes.every(ep => ep > skippedCount && ep <= episodes.length)
            const isNormalOrder = seasonEpisodes.every(ep => ep + skippedCount <= episodes.length)
            // NOTE(review): differentTitlesCount >= 1 is always true when episodes exist — confirm intent
            if (differentTitlesCount >= 1 && (isAbsoluteOrder || isNormalOrder)) {
              file.imdbId = metadata.imdbId;
              file.season = file.season - 1;
              file.episodes = file.episodes.map(ep => isAbsoluteOrder ? ep : ep + skippedCount);
              file.kitsuId = seasonMapping[file.episodes[0]].kitsuId;
              file.kitsuEpisodes = file.episodes.map(ep => seasonMapping[ep] && seasonMapping[ep].kitsuEpisode);
            }
          } else if (Object.values(seriesMapping).length === 1 && seriesMapping[1]) {
            // sometimes series might be named with sequel season but it's not a season on imdb and a new title
            const seasonMapping = seriesMapping[1];
            file.imdbId = metadata.imdbId;
            file.season = 1;
            file.kitsuId = seasonMapping[file.episodes[0]].kitsuId;
            file.kitsuEpisodes = file.episodes.map(ep => seasonMapping[ep] && seasonMapping[ep].kitsuEpisode);
          }
        });
  }
  return files;
}
// Decides whether an anime torrent needs cinemeta (imdb) metadata to decompose
// episodes: either the imdb mapping spans multiple seasons, or some file's
// season/episode numbers fall outside what the current metadata covers.
function needsCinemetaMetadataForAnime(files, metadata) {
  if (!metadata || !metadata.imdbId || !metadata.videos || !metadata.videos.length) {
    return false;
  }
  const imdbSeasons = metadata.videos.map(video => video.imdbSeason);
  // NaN/0 min-max collapse to MAX_VALUE so out-of-range checks stay conservative
  const minSeason = Math.min(...imdbSeasons) || Number.MAX_VALUE;
  const maxSeason = Math.max(...imdbSeasons) || Number.MAX_VALUE;
  const differentSeasons = new Set(imdbSeasons.filter(season => Number.isInteger(season))).size;
  const total = metadata.totalCount || Number.MAX_VALUE;
  if (differentSeasons > 1) {
    return true;
  }
  return files
      .filter(file => !file.isMovie && file.episodes)
      .some(file => file.season < minSeason || file.season > maxSeason || file.episodes.every(ep => ep > total));
}
// Refreshes the metadata object with cinemeta's videos and episode counts,
// mutating it in place. When cinemeta returns no videos the original values are
// kept; failures are logged (best-effort) and resolve to undefined.
async function updateToCinemetaMetadata(metadata) {
  try {
    const fetched = await getMetadata(metadata.imdbId, metadata.type);
    const source = !fetched.videos || !fetched.videos.length ? metadata : fetched;
    metadata.videos = source.videos;
    metadata.episodeCount = source.episodeCount;
    metadata.totalCount = source.totalCount;
    return metadata;
  } catch (error) {
    console.warn(`Failed ${metadata.imdbId} metadata cinemeta update due: ${error.message}`);
  }
}
// Resolves an imdb id for a movie title (raw string or already-parsed object).
// Lookups are throttled through the shared imdb limiter; failures yield undefined.
function findMovieImdbId(title) {
  const parsedTitle = typeof title !== 'string' ? title : parse(title);
  console.log(`Finding movie imdbId for ${title}`);
  return imdb_limiter.schedule(() => getImdbId(parsedTitle, Type.MOVIE).catch(() => undefined));
}
// Resolves a kitsu id for an anime movie title (raw string or parsed object);
// failures yield undefined.
function findMovieKitsuId(title) {
  const parsedTitle = typeof title !== 'string' ? title : parse(title);
  return getKitsuId(parsedTitle, Type.MOVIE).catch(() => undefined);
}
// True when any content entry looks like a disk structure.
// NOTE(review): exact semantics depend on the external isDisk helper — confirm.
function isDiskTorrent(contents) {
  for (const content of contents) {
    if (isDisk(content.path)) {
      return true;
    }
  }
  return false;
}
// True when the videos represent one movie: a single file, or a two-part split
// (part/disc/cd 1+2, or bare "01"/"02" filenames).
function isSingleMovie(videos) {
  const firstPartPattern = /\b(?:part|disc|cd)[ ._-]?0?1\b|^0?1\.\w{2,4}$/i;
  const secondPartPattern = /\b(?:part|disc|cd)[ ._-]?0?2\b|^0?2\.\w{2,4}$/i;
  if (videos.length === 1) {
    return true;
  }
  return videos.length === 2 &&
      videos.find(video => firstPartPattern.test(video.path)) &&
      videos.find(video => secondPartPattern.test(video.path));
}
// True when the video sits in a featurettes/extras folder (bonus content).
function isFeaturette(video) {
  const featurettePattern = /featurettes?\/|extras-grym/i;
  return featurettePattern.test(video.path);
}
// Strips all id/episode fields from a video (used for featurettes, which should
// not resolve to any title). Mutates and returns the same object.
function clearInfoFields(video) {
  const fields = ['imdbId', 'imdbSeason', 'imdbEpisode', 'kitsuId', 'kitsuEpisode'];
  for (const field of fields) {
    video[field] = undefined;
  }
  return video;
}
// Integer part of episode/100 (511 -> 5); Math.trunc matches the old `>> 0`.
function div100(episode) {
  return Math.trunc(episode / 100);
}
// Remainder after stripping the concatenated season part (511 -> 11).
function mod100(episode) {
  return episode - 100 * Math.trunc(episode / 100);
}

View File

@@ -0,0 +1,89 @@
import { parse } from 'parse-torrent-title';
// Attaches subtitle files to their most probable video files. A single video
// gets all subtitles; otherwise each subtitle is matched heuristically and
// unmatched subtitles are returned in the subtitles field. Mutates videos.
export function assignSubtitles({ contents, videos, subtitles }) {
  const hasVideos = videos && videos.length;
  const hasSubtitles = subtitles && subtitles.length;
  if (!hasVideos || !hasSubtitles) {
    return { contents, videos, subtitles };
  }
  if (videos.length === 1) {
    videos[0].subtitles = subtitles;
    return { contents, videos, subtitles: [] };
  }
  const parsedVideos = videos.map(video => _parseVideo(video));
  const unassigned = [];
  for (const subtitle of subtitles) {
    const matchedVideos = _mostProbableSubtitleVideos(subtitle, parsedVideos);
    if (!matchedVideos) {
      unassigned.push(subtitle);
      continue;
    }
    for (const video of matchedVideos) {
      video.subtitles = (video.subtitles || []).concat(subtitle);
    }
  }
  return { contents, videos, subtitles: unassigned };
}
// Enriches a video with its extension-less file name, folder name, and the
// parsed title info, keeping a reference to the original video object.
function _parseVideo(video) {
  const pathSegments = video.title.split('/');
  const fileName = pathSegments[pathSegments.length - 1].replace(/\.(\w{2,4})$/, '');
  const folderName = video.title.replace(/\/?[^/]+$/, '');
  return {
    videoFile: video,
    fileName,
    folderName,
    ...parseFilename(video.title)
  };
}
// Finds the video file(s) a subtitle most probably belongs to, trying matchers
// from most to least specific: exact filename containment, then title+season+
// episode, season+episode, title only, episode only. Returns the matched video
// files or undefined when no matcher yields a unique file.
function _mostProbableSubtitleVideos(subtitle, parsedVideos) {
  const subTitle = (subtitle.title || subtitle.path).split('/').pop().replace(/\.(\w{2,4})$/, '');
  const parsedSub = parsePath(subtitle.title || subtitle.path);
  // 1. subtitle filename contains the video filename
  const byFileName = parsedVideos.filter(video => subTitle.includes(video.fileName));
  if (byFileName.length === 1) {
    return byFileName.map(v => v.videoFile);
  }
  // 2. same parsed title, seasons and episodes
  const byTitleSeasonEpisode = parsedVideos.filter(video => video.title === parsedSub.title
      && arrayEquals(video.seasons, parsedSub.seasons)
      && arrayEquals(video.episodes, parsedSub.episodes));
  if (singleVideoFile(byTitleSeasonEpisode)) {
    return byTitleSeasonEpisode.map(v => v.videoFile);
  }
  // 3. same seasons and episodes regardless of title
  const bySeasonEpisode = parsedVideos.filter(video => arrayEquals(video.seasons, parsedSub.seasons)
      && arrayEquals(video.episodes, parsedSub.episodes));
  if (singleVideoFile(bySeasonEpisode)) {
    return bySeasonEpisode.map(v => v.videoFile);
  }
  // 4. same title only
  const byTitle = parsedVideos.filter(video => video.title && video.title === parsedSub.title);
  if (singleVideoFile(byTitle)) {
    return byTitle.map(v => v.videoFile);
  }
  // 5. same episodes only
  const byEpisode = parsedVideos.filter(video => arrayEquals(video.episodes, parsedSub.episodes));
  if (singleVideoFile(byEpisode)) {
    return byEpisode.map(v => v.videoFile);
  }
  return undefined;
}
// True when all matched entries point at exactly one distinct video file.
function singleVideoFile(videos) {
  const distinctIndexes = new Set(videos.map(video => video.videoFile.fileIndex));
  return distinctIndexes.size === 1;
}
// Parses every path segment and returns the first one carrying season+episode
// info, falling back to the last segment (the filename).
function parsePath(path) {
  const parsedSegments = path.split('/').map(segment => parseFilename(segment));
  const segmentWithEpisode = parsedSegments.find(parsed => parsed.season && parsed.episodes);
  return segmentWithEpisode ?? parsedSegments[parsedSegments.length - 1];
}
// Parses a filename with parse-torrent-title; a title ending in a bare number
// (ex. "Show 12") is additionally treated as an episode hint.
function parseFilename(filename) {
  const parsedInfo = parse(filename);
  const trailingNumber = parsedInfo.title.match(/(\d+)$/);
  if (trailingNumber && !parsedInfo.episodes) {
    parsedInfo.episodes = [Number.parseInt(trailingNumber[1], 10)];
  }
  return parsedInfo;
}
// Element-wise equality of two arrays; when either side is nullish the result
// is strict identity (so undefined===undefined is true, null vs undefined is false).
function arrayEquals(array1, array2) {
  if (!array1 || !array2) {
    return array1 === array2;
  }
  if (array1.length !== array2.length) {
    return false;
  }
  return array1.every((value, index) => value === array2[index]);
}

View File

@@ -0,0 +1,25 @@
import axios from 'axios';
import {cacheTrackers} from "./cache.js";
import { trackerConfig } from './config.js';
// Downloads the tracker list (one URL per line), dropping blank lines and —
// unless UDP is enabled — all udp:// trackers.
const downloadTrackers = async () => {
  const response = await axios.get(trackerConfig.TRACKERS_URL);
  let urlTrackers = response.data
      .split("\n")
      .filter(line => line.trim() !== '');
  if (!trackerConfig.UDP_ENABLED) {
    urlTrackers = urlTrackers.filter(line => !line.startsWith('udp://'));
  }
  console.log(`Trackers updated at ${Date.now()}: ${urlTrackers.length} trackers`);
  return urlTrackers;
};
// Returns the tracker list, served from cache and refreshed via downloadTrackers.
export const getTrackers = async () => cacheTrackers(downloadTrackers);

View File

@@ -0,0 +1,6 @@
// Content categories used to classify torrents throughout the consumer.
export const Type = {
  MOVIE: 'movie',
  SERIES: 'series',
  ANIME: 'anime',
  PORN: 'xxx', // adult category keeps the short code used by the scrapers
};

5736
src/node/consumer/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,34 @@
{
"name": "consumer",
"version": "1.0.0",
"exports": "./index.js",
"type": "module",
"scripts": {
"start": "node index.js",
"lint": "eslint . --ext .js"
},
"author": "A Dude",
"license": "MIT",
"dependencies": {
"amqplib": "^0.10.3",
"axios": "^1.6.1",
"bluebird": "^3.7.2",
"bottleneck": "^2.19.5",
"cache-manager": "^3.4.4",
"cache-manager-mongodb": "^0.3.0",
"google-sr": "^3.2.1",
"jaro-winkler": "^0.2.8",
"magnet-uri": "^6.2.0",
"moment": "^2.30.1",
"name-to-imdb": "^3.0.4",
"parse-torrent-title": "git://github.com/TheBeastLT/parse-torrent-title.git#022408972c2a040f846331a912a6a8487746a654",
"pg": "^8.11.3",
"sequelize": "^6.31.1",
"torrent-stream": "^1.2.1",
"user-agents": "^1.0.1444"
},
"devDependencies": {
"eslint": "^8.56.0",
"eslint-plugin-import": "^2.29.1"
}
}

View File

@@ -0,0 +1,5 @@
{
"GithubSettings": {
"PAT": ""
}
}

View File

@@ -0,0 +1,31 @@
{
"Serilog": {
"Using": [ "Serilog.Sinks.Console" ],
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"System": "Warning",
"System.Net.Http.HttpClient.Scraper.LogicalHandler": "Warning",
"System.Net.Http.HttpClient.Scraper.ClientHandler": "Warning",
"Quartz.Impl.StdSchedulerFactory": "Warning",
"Quartz.Core.QuartzScheduler": "Warning",
"Quartz.Simpl.RAMJobStore": "Warning",
"Quartz.Core.JobRunShell": "Warning",
"Quartz.Core.SchedulerSignalerImpl": "Warning"
}
},
"WriteTo": [
{
"Name": "Console",
"Args": {
"outputTemplate": "{Timestamp:HH:mm:ss} [{Level}] [{SourceContext}] {Message}{NewLine}{Exception}"
}
}
],
"Enrich": [ "FromLogContext", "WithMachineName", "WithThreadId" ],
"Properties": {
"Application": "Producer"
}
}
}

View File

@@ -0,0 +1,9 @@
{
"RabbitMqConfiguration": {
"Host": "localhost",
"Username": "guest",
"Password": "guest",
"QueueName": "test-queue",
"Durable": true
}
}

View File

@@ -0,0 +1,32 @@
{
"ScrapeConfiguration": {
"StorageConnectionString": "",
"Scrapers": [
{
"Name": "SyncEzTvJob",
"IntervalSeconds": 60,
"Enabled": true
},
{
"Name": "SyncTpbJob",
"IntervalSeconds": 60,
"Enabled": true
},
{
"Name": "SyncYtsJob",
"IntervalSeconds": 60,
"Enabled": true
},
{
"Name": "SyncTgxJob",
"IntervalSeconds": 60,
"Enabled": true
},
{
"Name": "SyncDmmJob",
"IntervalSeconds": 1800,
"Enabled": true
}
]
}
}

View File

@@ -0,0 +1,25 @@
namespace Scraper.Crawlers;

/// <summary>
/// Common base for all crawlers: declares the per-source configuration contract
/// and provides the shared, logged torrent-persistence path.
/// </summary>
public abstract class BaseCrawler(ILogger<BaseCrawler> logger, IDataStorage storage) : ICrawler
{
    /// <summary>Maps <see cref="Torrent"/> property names to source-specific field/element names.</summary>
    protected abstract IReadOnlyDictionary<string, string> Mappings { get; }

    /// <summary>Endpoint the crawler reads from.</summary>
    protected abstract string Url { get; }

    /// <summary>Label recorded as the source of each ingested torrent.</summary>
    protected abstract string Source { get; }

    /// <summary>Storage backend exposed to subclasses.</summary>
    protected IDataStorage Storage => storage;

    /// <summary>No-op by default; subclasses override to perform the crawl.</summary>
    public virtual Task Execute() => Task.CompletedTask;

    /// <summary>
    /// Persists a batch of torrents, logging success (with inserted count) or failure,
    /// and returns the storage result to the caller.
    /// </summary>
    protected async Task<InsertTorrentResult> InsertTorrents(IReadOnlyCollection<Torrent> torrent)
    {
        var result = await storage.InsertTorrents(torrent);

        if (!result.Success)
        {
            logger.LogWarning("Ingestion Failed: [{Error}]", result.ErrorMessage);
            return result;
        }

        logger.LogInformation("Ingestion Successful - Wrote {Count} new torrents", result.InsertedCount);
        return result;
    }
}

View File

@@ -0,0 +1,45 @@
namespace Scraper.Crawlers;

/// <summary>
/// Base class for crawlers that read a JSON array of torrents from <see cref="BaseCrawler.Url"/>.
/// Subclasses map each array element to a <see cref="Torrent"/> via <see cref="ParseTorrent"/>.
/// </summary>
public abstract class BaseJsonCrawler(IHttpClientFactory httpClientFactory, ILogger<BaseJsonCrawler> logger, IDataStorage storage) : BaseCrawler(logger, storage)
{
    private readonly HttpClient _client = httpClientFactory.CreateClient("Scraper");

    // NOTE(review): collectionName is currently unused — kept for subclass call compatibility.
    protected virtual async Task Execute(string collectionName)
    {
        logger.LogInformation("Starting {Source} crawl", Source);

        // Fix: removed an unused second client that was created (and disposed) here;
        // the cached _client field is the one actually used.
        var jsonBody = await _client.GetStringAsync(Url);
        using var json = JsonDocument.Parse(jsonBody);
        var torrents = json.RootElement.EnumerateArray()
            .Select(ParseTorrent)
            .Where(x => x is not null)
            .ToList();

        if (torrents.Count == 0)
        {
            logger.LogWarning("No torrents found in {Source} response", Source);
            return;
        }

        await InsertTorrents(torrents!);
    }

    /// <summary>Copies the info hash from <paramref name="item"/> onto the torrent when a mapping exists.</summary>
    protected virtual void HandleInfoHash(JsonElement item, Torrent torrent, string infoHashKey)
    {
        // Single dictionary lookup instead of ContainsKey + indexer.
        if (!Mappings.TryGetValue(infoHashKey, out var propertyName))
        {
            return;
        }

        var infoHash = item.GetProperty(propertyName).GetString();

        if (infoHash is not null)
        {
            torrent.InfoHash = infoHash;
        }
    }

    /// <summary>Maps one JSON array element to a torrent, or null to skip it.</summary>
    protected abstract Torrent? ParseTorrent(JsonElement item);
}

View File

@@ -0,0 +1,43 @@
namespace Scraper.Crawlers;

/// <summary>
/// Base class for crawlers that read an RSS/XML feed of torrents from <see cref="BaseCrawler.Url"/>.
/// Subclasses map each &lt;item&gt; element to a <see cref="Torrent"/> via <see cref="ParseTorrent"/>.
/// </summary>
public abstract class BaseXmlCrawler(IHttpClientFactory httpClientFactory, ILogger<BaseXmlCrawler> logger, IDataStorage storage) : BaseCrawler(logger, storage)
{
    public override async Task Execute()
    {
        logger.LogInformation("Starting {Source} crawl", Source);

        using var client = httpClientFactory.CreateClient("Scraper");
        var xml = await client.GetStringAsync(Url);
        var xmlRoot = XElement.Parse(xml);
        var torrents = xmlRoot.Descendants("item")
            .Select(ParseTorrent)
            .Where(x => x is not null)
            .ToList();

        if (torrents.Count == 0)
        {
            logger.LogWarning("No torrents found in {Source} response", Source);
            return;
        }

        await InsertTorrents(torrents!);
    }

    /// <summary>Copies the info hash element value onto the torrent when a mapping exists.</summary>
    protected virtual void HandleInfoHash(XElement itemNode, Torrent torrent, string infoHashKey)
    {
        // Single dictionary lookup instead of ContainsKey + indexer.
        if (!Mappings.TryGetValue(infoHashKey, out var elementName))
        {
            return;
        }

        var infoHash = itemNode.Element(elementName)?.Value;

        if (infoHash is not null)
        {
            torrent.InfoHash = infoHash;
        }
    }

    /// <summary>Maps one RSS item element to a torrent, or null to skip it.</summary>
    protected abstract Torrent? ParseTorrent(XElement itemNode);
}

View File

@@ -0,0 +1,11 @@
namespace Scraper.Crawlers;

/// <summary>Resolves crawler instances from the DI container.</summary>
public class CrawlerProvider(IServiceProvider serviceProvider) : ICrawlerProvider
{
    /// <summary>Returns every registered crawler.</summary>
    public IEnumerable<ICrawler> GetAll()
    {
        return serviceProvider.GetServices<ICrawler>();
    }

    /// <summary>Returns the crawler registered under the given key; throws when missing.</summary>
    public ICrawler Get(string name)
    {
        return serviceProvider.GetRequiredKeyedService<ICrawler>(name);
    }
}

View File

@@ -0,0 +1,141 @@
namespace Scraper.Crawlers.Sites;

/// <summary>
/// Ingests hashlists published in the debridmediamanager GitHub repository.
/// Each repo file embeds an LZ-compressed JSON torrent collection inside an iframe URL.
/// Requires an authenticated GitHub client (readonly PAT) for api.github.com.
/// </summary>
public partial class DebridMediaManagerCrawler(
    IHttpClientFactory httpClientFactory,
    ILogger<DebridMediaManagerCrawler> logger,
    IDataStorage storage,
    GithubConfiguration githubConfiguration) : BaseCrawler(logger, storage)
{
    [GeneratedRegex("""<iframe src="https:\/\/debridmediamanager.com\/hashlist#(.*)"></iframe>""")]
    private static partial Regex HashCollectionMatcher();

    [GeneratedRegex(@"[sS]([0-9]{1,2})|seasons?[\s-]?([0-9]{1,2})", RegexOptions.IgnoreCase, "en-GB")]
    private static partial Regex SeasonMatcher();

    // No field mappings needed; items are parsed directly by property name.
    protected override IReadOnlyDictionary<string, string> Mappings => new Dictionary<string, string>();
    protected override string Url => "https://api.github.com/repos/debridmediamanager/hashlists/contents";
    protected override string Source => "DMM";

    public override async Task Execute()
    {
        var client = httpClientFactory.CreateClient("Scraper");
        client.DefaultRequestHeaders.Authorization = new("Bearer", githubConfiguration.PAT);
        client.DefaultRequestHeaders.UserAgent.ParseAdd("curl"); // github api rejects requests without a user agent

        var jsonBody = await client.GetStringAsync(Url);
        using var json = JsonDocument.Parse(jsonBody); // fix: dispose JsonDocument (pooled buffers)

        foreach (var entry in json.RootElement.EnumerateArray())
        {
            await ParsePage(entry, client);
        }
    }

    /// <summary>Downloads and processes a single repo page unless it was already ingested.</summary>
    private async Task ParsePage(JsonElement entry, HttpClient client)
    {
        var (pageIngested, name) = await IsAlreadyIngested(entry);

        if (string.IsNullOrEmpty(name) || pageIngested)
        {
            return;
        }

        var url = entry.GetProperty("download_url").GetString();
        var pageSource = await client.GetStringAsync(url);

        await ExtractPageContents(pageSource, name);
    }

    /// <summary>Pulls the LZ-encoded hashlist out of the page's iframe URL.</summary>
    private async Task ExtractPageContents(string pageSource, string name)
    {
        var match = HashCollectionMatcher().Match(pageSource);

        if (!match.Success)
        {
            // unrecognized page layout — mark it ingested so it isn't retried forever
            logger.LogWarning("Failed to match hash collection for {Name}", name);
            await Storage.MarkPageAsIngested(name);
            return;
        }

        var encodedJson = match.Groups.Values.ElementAtOrDefault(1);

        if (string.IsNullOrEmpty(encodedJson?.Value))
        {
            logger.LogWarning("Failed to extract encoded json for {Name}", name);
            return;
        }

        await ProcessExtractedContentsAsTorrentCollection(encodedJson.Value, name);
    }

    /// <summary>Decompresses the hashlist, inserts its torrents, then records the page as ingested.</summary>
    private async Task ProcessExtractedContentsAsTorrentCollection(string encodedJson, string name)
    {
        var decodedJson = LZString.DecompressFromEncodedURIComponent(encodedJson);
        using var json = JsonDocument.Parse(decodedJson); // fix: dispose JsonDocument

        await InsertTorrentsForPage(json);

        var result = await Storage.MarkPageAsIngested(name);

        if (!result.Success)
        {
            logger.LogWarning("Failed to mark page as ingested: [{Error}]", result.ErrorMessage);
            return; // fix: previously fell through and logged success after a failure
        }

        logger.LogInformation("Successfully marked page as ingested");
    }

    /// <summary>Maps one hashlist entry to a torrent; the season regex decides tv vs movies.</summary>
    private Torrent? ParseTorrent(JsonElement item)
    {
        var torrent = new Torrent
        {
            Source = Source,
            Name = item.GetProperty("filename").GetString(),
            Size = item.GetProperty("bytes").GetInt64().ToString(),
            InfoHash = item.GetProperty("hash").ToString(),
            Seeders = 0,  // hashlists carry no swarm info
            Leechers = 0,
        };

        if (string.IsNullOrEmpty(torrent.Name))
        {
            return null;
        }

        torrent.Category = SeasonMatcher().IsMatch(torrent.Name) ? "tv" : "movies";

        return torrent;
    }

    private async Task InsertTorrentsForPage(JsonDocument json)
    {
        var torrents = json.RootElement.EnumerateArray()
            .Select(ParseTorrent)
            .Where(x => x is not null)
            .ToList();

        if (torrents.Count == 0)
        {
            logger.LogWarning("No torrents found in {Source} response", Source);
            return;
        }

        await InsertTorrents(torrents!);
    }

    /// <summary>Returns whether the named page was already ingested, plus its name (null when unnamed).</summary>
    private async Task<(bool Success, string? Name)> IsAlreadyIngested(JsonElement entry)
    {
        var name = entry.GetProperty("name").GetString();

        if (string.IsNullOrEmpty(name))
        {
            return (false, null);
        }

        var pageIngested = await Storage.PageIngested(name);

        return (pageIngested, name);
    }
}

View File

@@ -0,0 +1,32 @@
namespace Scraper.Crawlers.Sites;

/// <summary>
/// RSS crawler for the EZTV feed. The torrent-specific fields (size, seeds,
/// peers, info hash) live in the ezrss XML namespace, while title and category
/// are plain RSS item elements.
/// </summary>
public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
{
    protected override string Url => "https://eztv1.xyz/ezrss.xml";
    protected override string Source => "EZTV";

    // Namespace of the ezrss torrent extension elements.
    private static readonly XNamespace XmlNamespace = "http://xmlns.ezrss.it/0.1/";

    protected override IReadOnlyDictionary<string, string> Mappings =>
        new Dictionary<string, string>
        {
            [nameof(Torrent.Name)] = "title",
            [nameof(Torrent.Size)] = "contentLength",
            [nameof(Torrent.Seeders)] = "seeds",
            [nameof(Torrent.Leechers)] = "peers",
            [nameof(Torrent.InfoHash)] = "infoHash",
            [nameof(Torrent.Category)] = "category",
        };

    /// <summary>Maps one RSS item to a torrent; missing numeric fields default to 0.</summary>
    protected override Torrent ParseTorrent(XElement itemNode) =>
        new()
        {
            Source = Source,
            // title/category are standard (non-namespaced) RSS elements
            Name = itemNode.Element(Mappings[nameof(Torrent.Name)])?.Value,
            Size = itemNode.Element(XmlNamespace + Mappings[nameof(Torrent.Size)])?.Value,
            Seeders = int.Parse(itemNode.Element(XmlNamespace + Mappings[nameof(Torrent.Seeders)])?.Value ?? "0"),
            Leechers = int.Parse(itemNode.Element(XmlNamespace + Mappings[nameof(Torrent.Leechers)])?.Value ?? "0"),
            InfoHash = itemNode.Element(XmlNamespace + Mappings[nameof(Torrent.InfoHash)])?.Value,
            Category = itemNode.Element(Mappings[nameof(Torrent.Category)])?.Value.ToLowerInvariant(),
        };
}

View File

@@ -0,0 +1,143 @@
namespace Scraper.Crawlers.Sites;

/// <summary>
/// RSS crawler for TorrentGalaxy. The torrent size is not a dedicated RSS field
/// and has to be scraped out of the item description text ("Size: 1.2 GB Added ...").
/// Only movies/tv categories are ingested.
/// </summary>
public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger<TgxCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
{
    [GeneratedRegex(@"Size:\s+(.+?)\s+Added")]
    private static partial Regex SizeStringExtractor();

    [GeneratedRegex(@"(?i)\b(\d+(\.\d+)?)\s*([KMGT]?B)\b", RegexOptions.None, "en-GB")]
    private static partial Regex SizeStringParser();

    protected override string Url => "https://tgx.rs/rss";
    protected override string Source => "TorrentGalaxy";

    protected override IReadOnlyDictionary<string, string> Mappings
        => new Dictionary<string, string>
        {
            [nameof(Torrent.Name)] = "title",
            [nameof(Torrent.Size)] = "description",
            [nameof(Torrent.InfoHash)] = "guid",
            [nameof(Torrent.Category)] = "category",
        };

    private static readonly HashSet<string> AllowedCategories =
    [
        "movies",
        "tv",
    ];

    /// <summary>Maps one RSS item to a torrent, or null when the category is missing/not allowed.</summary>
    protected override Torrent? ParseTorrent(XElement itemNode)
    {
        var category = itemNode.Element(Mappings["Category"])?.Value.ToLowerInvariant();

        if (category is null)
        {
            return null;
        }

        if (!IsAllowedCategory(category))
        {
            return null;
        }

        var torrent = new Torrent
        {
            Source = Source,
            Name = itemNode.Element(Mappings["Name"])?.Value,
            InfoHash = itemNode.Element(Mappings[nameof(Torrent.InfoHash)])?.Value,
            Size = "0", // replaced below when the description parses
            Seeders = 0,
            Leechers = 0,
        };

        HandleSize(itemNode, torrent, "Size");
        torrent.Category = SetCategory(category);
        return torrent;
    }

    private static string SetCategory(string category) =>
        category.Contains("tv") switch
        {
            true => "tv",
            _ => category.Contains("movies") switch
            {
                true => "movies",
                _ => "xxx",
            },
        };

    /// <summary>Parses the size out of the description and stores it as bytes; keeps "0" on failure.</summary>
    private void HandleSize(XContainer itemNode, Torrent torrent, string key)
    {
        var description = itemNode.Element(Mappings[key])?.Value;

        if (description is null)
        {
            return;
        }

        var size = ExtractSizeFromDescription(description);

        if (size is not null)
        {
            torrent.Size = size.ToString();
        }
    }

    private long? ExtractSizeFromDescription(string input)
    {
        var sizeMatch = SizeStringExtractor().Match(input);

        if (!sizeMatch.Success)
        {
            // Fix: previously threw FormatException, which escaped ParseTorrent and
            // aborted the entire crawl; treat like the other parse failures instead.
            logger.LogWarning("Unable to parse size from the item description.");
            return null;
        }

        var sizeString = sizeMatch.Groups[1].Value;

        var units = new Dictionary<string, long>
        {
            { "B", 1 },
            { "KB", 1L << 10 },
            { "MB", 1L << 20 },
            { "GB", 1L << 30 },
            { "TB", 1L << 40 },
        };

        var match = SizeStringParser().Match(sizeString);

        if (match.Success)
        {
            // Fix: InvariantCulture — the feed always uses '.' as the decimal separator,
            // regardless of the host locale.
            var val = double.Parse(match.Groups[1].Value, System.Globalization.CultureInfo.InvariantCulture);
            var unit = match.Groups[3].Value.ToUpper();

            if (units.TryGetValue(unit, out var multiplier))
            {
                try
                {
                    var bytes = checked((long)(val * multiplier));
                    return bytes;
                }
                catch (OverflowException)
                {
                    logger.LogWarning("The size '{Size}' is too large.", sizeString);
                    return null;
                }
            }

            logger.LogWarning("The size unit '{Unit}' is not supported.", unit);
            return null;
        }

        logger.LogWarning("The size '{Size}' is not in a supported format.", sizeString);
        return null;
    }

    private static bool IsAllowedCategory(string category)
    {
        // Feed categories look like "Movies : HD" — compare only the prefix.
        var parsedCategory = category.Split(':').ElementAtOrDefault(0)?.Trim().ToLower();
        return parsedCategory is not null && AllowedCategories.Contains(parsedCategory);
    }
}

View File

@@ -0,0 +1,92 @@
namespace Scraper.Crawlers.Sites;
public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger<TpbCrawler> logger, IDataStorage storage) : BaseJsonCrawler(httpClientFactory, logger, storage)
{
    protected override string Url => "https://apibay.org/precompiled/data_top100_recent.json";

    protected override string Source => "TPB";

    // Reference table of the apibay category ids, retained for documentation.
    // ReSharper disable once UnusedMember.Local
    private readonly Dictionary<string, Dictionary<string, int>> TpbCategories = new()
    {
        {"VIDEO", new() {
            {"ALL", 200},
            {"MOVIES", 201},
            {"MOVIES_DVDR", 202},
            {"MUSIC_VIDEOS", 203},
            {"MOVIE_CLIPS", 204},
            {"TV_SHOWS", 205},
            {"HANDHELD", 206},
            {"MOVIES_HD", 207},
            {"TV_SHOWS_HD", 208},
            {"MOVIES_3D", 209},
            {"OTHER", 299},
        }},
        {"PORN", new() {
            {"ALL", 500},
            {"MOVIES", 501},
            {"MOVIES_DVDR", 502},
            {"PICTURES", 503},
            {"GAMES", 504},
            {"MOVIES_HD", 505},
            {"MOVIE_CLIPS", 506},
            {"OTHER", 599},
        }},
    };

    // Category ids bucketed as tv shows.
    private static readonly HashSet<int> TvSeriesCategories = [ 205, 208 ];

    // Category ids bucketed as movies.
    private static readonly HashSet<int> MovieCategories = [ 201, 202, 207, 209 ];

    // Adult category ids; listed for reference, never ingested.
    private static readonly HashSet<int> PornCategories = [ 500, 501, 502, 505, 506 ];

    // The union of everything this crawler is willing to ingest.
    private static readonly HashSet<int> AllowedCategories = [ ..MovieCategories, ..TvSeriesCategories ];

    // Maps Torrent property names to the JSON property names that carry them.
    protected override IReadOnlyDictionary<string, string> Mappings
        => new Dictionary<string, string>
        {
            [nameof(Torrent.Name)] = "name",
            [nameof(Torrent.Size)] = "size",
            [nameof(Torrent.Seeders)] = "seeders",
            [nameof(Torrent.Leechers)] = "leechers",
            [nameof(Torrent.InfoHash)] = "info_hash",
            [nameof(Torrent.Imdb)] = "imdb",
            [nameof(Torrent.Category)] = "category",
        };

    /// <summary>
    /// Converts one apibay JSON item into a <see cref="Torrent"/>; returns null
    /// for items outside the allowed movie/tv categories.
    /// </summary>
    protected override Torrent? ParseTorrent(JsonElement item)
    {
        var categoryId = item.GetProperty(Mappings["Category"]).GetInt32();
        if (!AllowedCategories.Contains(categoryId))
        {
            return null;
        }

        var result = new Torrent
        {
            Source = Source,
            Name = item.GetProperty(Mappings["Name"]).GetString(),
            Size = item.GetProperty(Mappings["Size"]).GetInt64().ToString(),
            Seeders = item.GetProperty(Mappings["Seeders"]).GetInt32(),
            Leechers = item.GetProperty(Mappings["Leechers"]).GetInt32(),
            Imdb = item.GetProperty(Mappings["Imdb"]).GetString(),
        };

        HandleInfoHash(item, result, "InfoHash");
        result.Category = HandleCategory(categoryId);
        return result;
    }

    // Buckets a numeric category id into the internal category names.
    private static string HandleCategory(int category)
    {
        if (MovieCategories.Contains(category))
        {
            return "movies";
        }

        return TvSeriesCategories.Contains(category) ? "tv" : "xxx";
    }

    public override Task Execute() => Execute("items");
}

View File

@@ -0,0 +1,44 @@
namespace Scraper.Crawlers.Sites;
public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger<YtsCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
{
    protected override string Url => "https://yts.am/rss";

    protected override string Source => "YTS";

    // RSS element names for each Torrent property the base crawler knows about.
    protected override IReadOnlyDictionary<string, string> Mappings
        => new Dictionary<string, string>
        {
            [nameof(Torrent.Name)] = "title",
            [nameof(Torrent.Size)] = "size",
            [nameof(Torrent.Seeders)] = "seeders",
            [nameof(Torrent.Leechers)] = "leechers",
            [nameof(Torrent.InfoHash)] = "enclosure",
        };

    /// <summary>
    /// Builds a movie torrent from one feed item. Everything from YTS is
    /// categorised as "movies"; size/seeders/leechers are defaulted to zero here
    /// (this override does not read the mapped size/seeder/leecher elements).
    /// </summary>
    protected override Torrent? ParseTorrent(XElement itemNode)
    {
        var result = new Torrent
        {
            Source = Source,
            Name = itemNode.Element(Mappings["Name"])?.Value,
            Category = "movies",
            Size = "0",
            Seeders = 0,
            Leechers = 0,
        };

        HandleInfoHash(itemNode, result, "InfoHash");
        return result;
    }

    // The info hash is embedded in the enclosure's download URL after "/download/";
    // leave the torrent untouched when the URL is absent or has no such segment.
    protected override void HandleInfoHash(XElement itemNode, Torrent torrent, string infoHashKey)
    {
        var downloadUrl = itemNode.Element(Mappings[infoHashKey])?.Attribute("url")?.Value;
        var hash = downloadUrl?.Split("/download/").ElementAtOrDefault(1);
        if (hash is not null)
        {
            torrent.InfoHash = hash;
        }
    }
}

10
src/producer/Dockerfile Normal file
View File

@@ -0,0 +1,10 @@
# Build stage: restore and publish the producer using the full .NET 8 SDK image.
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build-env
WORKDIR /App

# Copy the whole source tree, then restore and publish a Release build to /App/out.
COPY . ./
RUN dotnet restore
RUN dotnet publish -c Release -o out

# Runtime stage: only the ASP.NET runtime plus the published output (no SDK).
FROM mcr.microsoft.com/dotnet/aspnet:8.0
WORKDIR /App
COPY --from=build-env /App/out .
ENTRYPOINT ["dotnet", "Producer.dll"]

View File

@@ -0,0 +1,23 @@
namespace Scraper.Extensions;
public static class ConfigurationExtensions
{
    // All JSON configuration files live under this folder next to the binaries.
    private const string ConfigurationFolder = "Configuration";
    private const string LoggingConfig = "logging.json";

    /// <summary>
    /// Registers every configuration source the scraper needs: the required JSON
    /// files (logging, scrapers, rabbitmq, github) with hot reload, environment
    /// variables, and user secrets. Later sources override earlier ones.
    /// </summary>
    public static IConfigurationBuilder AddScrapeConfiguration(this IConfigurationBuilder configuration)
        => configuration
            .SetBasePath(Path.Combine(AppContext.BaseDirectory, ConfigurationFolder))
            .AddJsonFile(LoggingConfig, optional: false, reloadOnChange: true)
            .AddJsonFile(ScrapeConfiguration.Filename, optional: false, reloadOnChange: true)
            .AddJsonFile(RabbitMqConfiguration.Filename, optional: false, reloadOnChange: true)
            .AddJsonFile(GithubConfiguration.Filename, optional: false, reloadOnChange: true)
            .AddEnvironmentVariables()
            .AddUserSecrets<Program>();
}

View File

@@ -0,0 +1,8 @@
namespace Scraper.Extensions;
internal static class ConfigureHostBuilderExtensions
{
    /// <summary>
    /// Wires Serilog into the host, reading sink and level settings from the
    /// supplied configuration (populated from logging.json).
    /// </summary>
    internal static IHostBuilder SetupSerilog(this ConfigureHostBuilder builder, IConfiguration configuration)
    {
        return builder.UseSerilog((_, loggerConfiguration) => loggerConfiguration.ReadFrom.Configuration(configuration));
    }
}

View File

@@ -0,0 +1,157 @@
namespace Scraper.Extensions;
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Registers the named HttpClient plus every crawler (keyed by its type name,
    /// resolved through <see cref="ICrawlerProvider"/>) and the IP lookup service.
    /// </summary>
    internal static IServiceCollection AddCrawlers(this IServiceCollection services)
    {
        services.AddHttpClient("Scraper");

        services
            .AddKeyedTransient<ICrawler, EzTvCrawler>(nameof(EzTvCrawler))
            .AddKeyedTransient<ICrawler, YtsCrawler>(nameof(YtsCrawler))
            .AddKeyedTransient<ICrawler, TpbCrawler>(nameof(TpbCrawler))
            .AddKeyedTransient<ICrawler, TgxCrawler>(nameof(TgxCrawler))
            .AddKeyedTransient<ICrawler, DebridMediaManagerCrawler>(nameof(DebridMediaManagerCrawler))
            .AddSingleton<ICrawlerProvider, CrawlerProvider>()
            .AddTransient<IIpService, IpService>();

        return services;
    }

    /// <summary>Registers storage access and the torrent message publisher.</summary>
    internal static IServiceCollection AddDataStorage(this IServiceCollection services)
    {
        services.AddTransient<IDataStorage, DapperDataStorage>();
        services.AddTransient<IMessagePublisher, TorrentPublisher>();
        return services;
    }

    /// <summary>
    /// Binds <see cref="RabbitMqConfiguration"/> (throwing when the section is
    /// missing) and wires MassTransit to the configured RabbitMQ host.
    /// </summary>
    internal static IServiceCollection RegisterMassTransit(this IServiceCollection services, IConfiguration configuration)
    {
        var rabbitConfig = configuration.GetSection(RabbitMqConfiguration.SectionName).Get<RabbitMqConfiguration>();
        ArgumentNullException.ThrowIfNull(rabbitConfig, nameof(rabbitConfig));
        services.AddSingleton(rabbitConfig);

        services.AddMassTransit(busConfigurator =>
        {
            busConfigurator.SetKebabCaseEndpointNameFormatter();

            busConfigurator.UsingRabbitMq((context, busFactoryConfigurator) =>
            {
                // rabbitConfig is known non-null here (ThrowIfNull above), so the
                // original null-forgiving '!' was redundant and has been dropped.
                busFactoryConfigurator.Host(rabbitConfig.Host, hostConfigurator =>
                {
                    hostConfigurator.Username(rabbitConfig.Username);
                    hostConfigurator.Password(rabbitConfig.Password);
                });
            });
        });

        return services;
    }

    /// <summary>
    /// Registers all Quartz jobs and their triggers. Scraper jobs are gated by
    /// scrapers.json; the DMM job is additionally gated on a GitHub PAT being
    /// configured (hashlist requests must be authenticated).
    /// </summary>
    internal static IServiceCollection AddQuartz(this IServiceCollection services, IConfiguration configuration)
    {
        var scrapeConfiguration = LoadScrapeConfiguration(services, configuration);
        var githubConfiguration = LoadGithubConfiguration(services, configuration);

        services
            .AddTransient<SyncEzTvJob>()
            .AddTransient<SyncTpbJob>()
            .AddTransient<SyncYtsJob>()
            .AddTransient<SyncTgxJob>()
            .AddTransient<IPJob>()
            .AddTransient<PublisherJob>();

        // Single gate for both the DI registration and the scheduling below.
        var dmmEnabled = !string.IsNullOrEmpty(githubConfiguration.PAT);
        if (dmmEnabled)
        {
            services.AddTransient<SyncDmmJob>();
        }

        services.AddQuartz(
            quartz =>
            {
                AddJobWithTrigger<SyncEzTvJob>(quartz, SyncEzTvJob.Key, SyncEzTvJob.Trigger, scrapeConfiguration);
                AddJobWithTrigger<SyncTpbJob>(quartz, SyncTpbJob.Key, SyncTpbJob.Trigger, scrapeConfiguration);
                AddJobWithTrigger<SyncYtsJob>(quartz, SyncYtsJob.Key, SyncYtsJob.Trigger, scrapeConfiguration);
                AddJobWithTrigger<SyncTgxJob>(quartz, SyncTgxJob.Key, SyncTgxJob.Trigger, scrapeConfiguration);
                AddJobWithTrigger<IPJob>(quartz, IPJob.Key, IPJob.Trigger, 60 * 5);
                AddJobWithTrigger<PublisherJob>(quartz, PublisherJob.Key, PublisherJob.Trigger, 10);

                if (dmmEnabled)
                {
                    AddJobWithTrigger<SyncDmmJob>(quartz, SyncDmmJob.Key, SyncDmmJob.Trigger, scrapeConfiguration);
                }
            });

        services.AddQuartzHostedService(
            options =>
            {
                options.WaitForJobsToComplete = true;
            });

        return services;
    }

    // Binds and registers GithubConfiguration; throws when the section is missing.
    private static GithubConfiguration LoadGithubConfiguration(IServiceCollection services, IConfiguration configuration)
    {
        var githubConfiguration = configuration.GetSection(GithubConfiguration.SectionName).Get<GithubConfiguration>();
        ArgumentNullException.ThrowIfNull(githubConfiguration, nameof(githubConfiguration));
        services.AddSingleton(githubConfiguration);
        return githubConfiguration;
    }

    // Binds and registers ScrapeConfiguration; throws when the section is missing.
    private static ScrapeConfiguration LoadScrapeConfiguration(IServiceCollection services, IConfiguration configuration)
    {
        var scrapeConfiguration = configuration.GetSection(ScrapeConfiguration.SectionName).Get<ScrapeConfiguration>();
        ArgumentNullException.ThrowIfNull(scrapeConfiguration, nameof(scrapeConfiguration));
        services.AddSingleton(scrapeConfiguration);
        return scrapeConfiguration;
    }

    // Schedules a job only when its entry in scrapers.json (matched by job type
    // name, case-insensitively) exists and is enabled. Delegates to the interval
    // overload so the trigger wiring is defined in exactly one place.
    private static void AddJobWithTrigger<TJobType>(
        IServiceCollectionQuartzConfigurator quartz,
        JobKey key,
        TriggerKey trigger,
        ScrapeConfiguration scrapeConfiguration) where TJobType : IJob
    {
        var scraper = scrapeConfiguration.Scrapers
            .FirstOrDefault(x => x.Name != null &&
                                 x.Name.Equals(typeof(TJobType).Name, StringComparison.OrdinalIgnoreCase));

        if (scraper is null || !scraper.Enabled)
        {
            return;
        }

        AddJobWithTrigger<TJobType>(quartz, key, trigger, scraper.IntervalSeconds);
    }

    // Registers a durable job plus a simple repeating trigger that first fires
    // 20 seconds after startup and then every `interval` seconds forever.
    private static void AddJobWithTrigger<TJobType>(
        IServiceCollectionQuartzConfigurator quartz,
        JobKey key,
        TriggerKey trigger,
        int interval) where TJobType : IJob
    {
        quartz.AddJob<TJobType>(opts => opts.WithIdentity(key).StoreDurably());

        quartz.AddTrigger(
            opts => opts
                .ForJob(key)
                .WithIdentity(trigger)
                .StartAt(DateTimeOffset.Now.AddSeconds(20))
                .WithSimpleSchedule(x => x.WithInterval(TimeSpan.FromSeconds(interval)).RepeatForever()));
    }
}

View File

@@ -0,0 +1,22 @@
// Global using directives
global using System.Text;
global using System.Text.Json;
global using System.Text.RegularExpressions;
global using System.Xml.Linq;
global using Dapper;
global using LZStringCSharp;
global using MassTransit;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using Microsoft.Extensions.Logging;
global using Npgsql;
global using Quartz;
global using Scraper.Crawlers;
global using Scraper.Crawlers.Sites;
global using Scraper.Extensions;
global using Scraper.Interfaces;
global using Scraper.Jobs;
global using Scraper.Models;
global using Scraper.Services;
global using Serilog;

View File

@@ -0,0 +1,6 @@
namespace Scraper.Interfaces;
/// <summary>
/// A single source scraper. Implementations in this project fetch a feed,
/// parse its items into torrents, and persist them via IDataStorage; they are
/// registered as keyed services under their own type name.
/// </summary>
public interface ICrawler
{
    /// <summary>Runs one full scrape cycle for this source.</summary>
    Task Execute();
}

View File

@@ -0,0 +1,8 @@
namespace Scraper.Interfaces;
/// <summary>
/// Lookup over the registered crawlers (the keyed ICrawler services).
/// </summary>
public interface ICrawlerProvider
{
    /// <summary>All registered crawlers.</summary>
    IEnumerable<ICrawler> GetAll();

    /// <summary>
    /// Resolves the crawler registered under the given key — callers in this
    /// project pass the crawler's type name (e.g. nameof(EzTvCrawler)).
    /// </summary>
    ICrawler Get(string name);
}

View File

@@ -0,0 +1,10 @@
namespace Scraper.Interfaces;
/// <summary>
/// Persistence layer for scraped torrents and page-ingestion bookkeeping.
/// </summary>
public interface IDataStorage
{
    /// <summary>Bulk-inserts torrents; the result carries the inserted count or an error message.</summary>
    Task<InsertTorrentResult> InsertTorrents(IReadOnlyCollection<Torrent> torrents, CancellationToken cancellationToken = default);

    /// <summary>Returns torrents awaiting publication to the message bus (see PublisherJob).</summary>
    Task<IReadOnlyCollection<Torrent>> GetPublishableTorrents(CancellationToken cancellationToken = default);

    /// <summary>Flags the given torrents as processed so they are not published again.</summary>
    Task<UpdatedTorrentResult> SetTorrentsProcessed(IReadOnlyCollection<Torrent> torrents, CancellationToken cancellationToken = default);

    /// <summary>True when the page with the given id has already been ingested.</summary>
    Task<bool> PageIngested(string pageId, CancellationToken cancellationToken = default);

    /// <summary>Records the page as ingested so later runs can skip it.</summary>
    Task<PageIngestedResult> MarkPageAsIngested(string pageId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,6 @@
namespace Scraper.Interfaces;
/// <summary>
/// Resolves the machine's public IP address; run periodically by IPJob.
/// </summary>
public interface IIpService
{
    /// <summary>
    /// Looks up the public IP address. NOTE(review): no value is returned, so the
    /// implementation presumably logs or caches the result — confirm against IpService.
    /// </summary>
    Task GetPublicIpAddress();
}

View File

@@ -0,0 +1,6 @@
namespace Scraper.Interfaces;
/// <summary>
/// Publishes scraped torrents onto the message bus for downstream consumers.
/// </summary>
public interface IMessagePublisher
{
    /// <summary>Publishes the given torrents; completes when the send has finished.</summary>
    Task PublishAsync(IEnumerable<Torrent> torrents, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,23 @@
namespace Scraper.Jobs;
public abstract class BaseJob(ICrawlerProvider crawlerProvider) : IJob
{
    // Give up after this many immediate refires so a persistently failing
    // crawler cannot retry forever.
    private const int MaxRefires = 5;

    /// <summary>
    /// Resolves the crawler named by <see cref="Crawler"/> and runs it. On failure
    /// the job asks Quartz to refire immediately until the refire budget is spent.
    /// </summary>
    public async Task Execute(IJobExecutionContext context)
    {
        if (context.RefireCount > MaxRefires)
        {
            // Include the crawler name so the abort is attributable in logs.
            throw new InvalidOperationException($"Job for crawler '{Crawler}' failed too many times");
        }

        try
        {
            await crawlerProvider.Get(Crawler).Execute();
        }
        catch (Exception ex)
        {
            // The original threw with an empty message, losing all context in
            // Quartz's error reporting; name the crawler and keep the cause.
            throw new JobExecutionException(msg: $"Crawler '{Crawler}' failed", refireImmediately: true, cause: ex);
        }
    }

    // Name of the crawler to run; must match a keyed ICrawler registration.
    protected abstract string Crawler { get; }
}

View File

@@ -0,0 +1,7 @@
namespace Scraper.Jobs;
/// <summary>
/// Contract for a Quartz job tied to a specific crawler type.
/// NOTE(review): no implementors are visible in this file — the concrete jobs
/// derive from BaseJob instead; confirm whether this interface is still used.
/// </summary>
public interface ICrawlerJob<out TCrawler> : IJob
    where TCrawler : ICrawler
{
    // The crawler this job drives.
    TCrawler CrawlerType { get; }
}

View File

@@ -0,0 +1,14 @@
namespace Scraper.Jobs;
[DisallowConcurrentExecution]
public class IPJob(IIpService ipService) : IJob
{
    private const string JobName = nameof(IPJob);

    // Quartz identity used by the scheduler registration in ServiceCollectionExtensions.
    public static readonly JobKey Key = new(JobName, nameof(Jobs));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Jobs));

    /// <summary>Delegates to the injected service to look up the public IP address.</summary>
    public Task Execute(IJobExecutionContext context) => ipService.GetPublicIpAddress();
}

View File

@@ -0,0 +1,31 @@
namespace Scraper.Jobs;
[DisallowConcurrentExecution]
public class PublisherJob(IMessagePublisher publisher, IDataStorage storage, ILogger<PublisherJob> logger) : IJob
{
    private const string JobName = nameof(PublisherJob);

    // Quartz identity used by the scheduler registration in ServiceCollectionExtensions.
    public static readonly JobKey Key = new(JobName, nameof(Jobs));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Jobs));

    /// <summary>
    /// Drains the publishable torrents from storage, pushes them onto the bus,
    /// then flags them as processed so they are not published again.
    /// </summary>
    public async Task Execute(IJobExecutionContext context)
    {
        var token = context.CancellationToken;

        var pending = await storage.GetPublishableTorrents(token);
        if (pending.Count == 0)
        {
            return;
        }

        await publisher.PublishAsync(pending, token);

        var updateResult = await storage.SetTorrentsProcessed(pending, token);
        if (updateResult.Success)
        {
            logger.LogInformation("Successfully set {Count} torrents as processed", updateResult.UpdatedCount);
            return;
        }

        logger.LogWarning("Failed to set torrents as processed: [{Error}]", updateResult.ErrorMessage);
    }
}

View File

@@ -0,0 +1,10 @@
namespace Scraper.Jobs;
// Quartz job that runs the Debrid Media Manager crawler via BaseJob.
// Only registered/scheduled when a GitHub PAT is configured (see ServiceCollectionExtensions.AddQuartz).
[DisallowConcurrentExecution]
public class SyncDmmJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider)
{
    // Job identity derives from the crawler name so scheduling config and logs line up.
    private const string JobName = nameof(DebridMediaManagerCrawler);
    public static readonly JobKey Key = new(JobName, nameof(Crawlers));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Crawlers));

    // Tells BaseJob which keyed ICrawler registration to resolve and run.
    protected override string Crawler => nameof(DebridMediaManagerCrawler);
}

View File

@@ -0,0 +1,10 @@
namespace Scraper.Jobs;
// Quartz job that runs the EZTV crawler via BaseJob.
[DisallowConcurrentExecution]
public class SyncEzTvJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider)
{
    // Job identity derives from the crawler name so scheduling config and logs line up.
    private const string JobName = nameof(EzTvCrawler);
    public static readonly JobKey Key = new(JobName, nameof(Crawlers));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Crawlers));

    // Tells BaseJob which keyed ICrawler registration to resolve and run.
    protected override string Crawler => nameof(EzTvCrawler);
}

View File

@@ -0,0 +1,10 @@
namespace Scraper.Jobs;
// Quartz job that runs the TorrentGalaxy crawler via BaseJob.
[DisallowConcurrentExecution]
public class SyncTgxJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider)
{
    // Job identity derives from the crawler name so scheduling config and logs line up.
    private const string JobName = nameof(TgxCrawler);
    public static readonly JobKey Key = new(JobName, nameof(Crawlers));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Crawlers));

    // Tells BaseJob which keyed ICrawler registration to resolve and run.
    protected override string Crawler => nameof(TgxCrawler);
}

View File

@@ -0,0 +1,10 @@
namespace Scraper.Jobs;
// Quartz job that runs The Pirate Bay crawler via BaseJob.
[DisallowConcurrentExecution]
public class SyncTpbJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider)
{
    // Job identity derives from the crawler name so scheduling config and logs line up.
    private const string JobName = nameof(TpbCrawler);
    public static readonly JobKey Key = new(JobName, nameof(Crawlers));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Crawlers));

    // Tells BaseJob which keyed ICrawler registration to resolve and run.
    protected override string Crawler => nameof(TpbCrawler);
}

View File

@@ -0,0 +1,10 @@
namespace Scraper.Jobs;
// Quartz job that runs the YTS crawler via BaseJob.
[DisallowConcurrentExecution]
public class SyncYtsJob(ICrawlerProvider crawlerProvider) : BaseJob(crawlerProvider)
{
    // Job identity derives from the crawler name so scheduling config and logs line up.
    private const string JobName = nameof(YtsCrawler);
    public static readonly JobKey Key = new(JobName, nameof(Crawlers));
    public static readonly TriggerKey Trigger = new($"{JobName}-trigger", nameof(Crawlers));

    // Tells BaseJob which keyed ICrawler registration to resolve and run.
    protected override string Crawler => nameof(YtsCrawler);
}

View File

@@ -0,0 +1,9 @@
namespace Scraper.Models;
// Settings bound from the GithubSettings section of github.json (see ConfigurationExtensions).
public class GithubConfiguration
{
    public const string SectionName = "GithubSettings";
    public const string Filename = "github.json";

    // Personal access token used to authenticate GitHub requests. When null or
    // empty, the DMM sync job is not registered or scheduled (see
    // ServiceCollectionExtensions.AddQuartz).
    public string? PAT { get; set; }
}

View File

@@ -0,0 +1,13 @@
namespace Scraper.Models;
// Settings bound from the RabbitMqConfiguration section of rabbitmq.json (see ConfigurationExtensions).
public class RabbitMqConfiguration
{
    public const string SectionName = "RabbitMqConfiguration";
    public const string Filename = "rabbitmq.json";

    // Broker connection details consumed by the MassTransit registration.
    public string? Host { get; set; }
    public string? Username { get; set; }
    public string? Password { get; set; }

    // NOTE(review): QueueName and Durable are not read by the visible MassTransit
    // setup; presumably consumed by the publisher — confirm before removing.
    public string? QueueName { get; set; }
    public bool Durable { get; set; }
}

View File

@@ -0,0 +1,5 @@
namespace Scraper.Models;
// Outcome of a bulk torrent insert; ErrorMessage is populated on failure.
public record InsertTorrentResult(bool Success, int InsertedCount = 0, string? ErrorMessage = null);

// Outcome of marking torrents processed; ErrorMessage is populated on failure.
public record UpdatedTorrentResult(bool Success, int UpdatedCount = 0, string? ErrorMessage = null);

// Outcome of recording a page as ingested.
public record PageIngestedResult(bool Success, string? ErrorMessage = null);

View File

@@ -0,0 +1,10 @@
namespace Scraper.Models;
// Settings bound from the ScrapeConfiguration section of scrapers.json (see ConfigurationExtensions).
public class ScrapeConfiguration
{
    public const string SectionName = "ScrapeConfiguration";
    public const string Filename = "scrapers.json";

    // Per-scraper-job settings, matched by job type name when scheduling
    // (see ServiceCollectionExtensions.AddJobWithTrigger).
    public List<Scraper> Scrapers { get; set; } = [];

    // Connection string for the torrent data store.
    public string StorageConnectionString { get; set; } = "";
}

View File

@@ -0,0 +1,10 @@
namespace Scraper.Models;
// One scraper-job entry from scrapers.json.
public class Scraper
{
    // Job type name this entry applies to (compared case-insensitively at scheduling time).
    public string? Name { get; set; }

    // Seconds between scheduled runs of the job.
    public int IntervalSeconds { get; set; } = 60;

    // When false, the job is not scheduled at all.
    public bool Enabled { get; set; } = true;
}

Some files were not shown because too many files have changed in this diff Show More