1 Commit

Author: purple_emily
SHA1: 2e65ff9276
Message: Pull changes from Torrentio
Date: 2024-04-01 17:20:33 +01:00
65 changed files with 1443 additions and 1921 deletions

View File

@@ -12,9 +12,6 @@ A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1.
2.
3.
**Expected behavior**
A clear and concise description of what you expected to happen.
@@ -26,7 +26,6 @@ If the logs are short, make sure to triple backtick them, or use https://pastebin.com
**Hardware:**
- OS and distro: [e.g. Raspberry Pi OS, Ubuntu, Rocky]
- Server: [e.g. VM, Baremetal, Pi]
- Knightcrawler Version: [2.0.xx]
**Additional context**
Add any other context about the problem here.

View File

@@ -1,39 +0,0 @@
on:
  push:
    branches:
      - main
  workflow_dispatch:
jobs:
  changelog:
    name: Generate changelog
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Generate a changelog
        uses: orhun/git-cliff-action@v3
        with:
          config: cliff.toml
          args: --verbose
        env:
          OUTPUT: CHANGELOG.md
          GITHUB_REPO: ${{ github.repository }}
      - name: Commit
        run: |
          git config user.name 'github-actions[bot]'
          git config user.email 'github-actions[bot]@users.noreply.github.com'
          set +e
          git checkout -b feat/changelog_$(date +"%d_%m")
          git add CHANGELOG.md
          git commit -m "[skip ci] Update changelog"
          git push https://${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.git feat/changelog_$(date +"%d_%m")
      - name: create pull request
        run: gh pr create -B main -H feat/changelog_$(date +"%d_%m") --title '[skip ci] Update changelog' --body 'Changelog update by git-cliff'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,22 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.0.0] - 2024-03-25
### Details
#### Changed
- Change POSTGRES_USERNAME to POSTGRES_USER. Oops by @purple-emily
- Change POSTGRES_DATABASE to POSTGRES_DB by @purple-emily
- Two movie commands instead of movie and tv by @purple-emily
- Cleanup RabbitMQ env vars, and Github Pat by @iPromKnight
#### Fixed
- HRD -> HDR by @mplewis
## New Contributors
* @mplewis made their first contribution
<!-- generated by git-cliff -->

View File

@@ -1,34 +0,0 @@
We use [Meaningful commit messages](https://reflectoring.io/meaningful-commit-messages/).
TL;DR:
1. It should answer the question: “What happens if the changes are applied?”
2. Use the imperative, present tense. It is easier to read and scan quickly:
```
Right: Add feature to alert admin for new user registration
Wrong: Added feature ... (past tense)
```
3. The summary should always be able to complete the following sentence:
`If applied, this commit will… `
We use [git-cliff](https://git-cliff.org/) for our changelog.
The breaking flag is set to true when the commit has an exclamation mark after the commit type and scope, e.g.:
`feat(scope)!: this is a breaking change`
Keywords (Commit messages should start with these):
```
# Added
add
support
# Removed
remove
delete
# Fixed
test
fix
```
Any other commits will fall under the `Changed` category.
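For example (hypothetical commit subjects, shown only to illustrate the grouping):
```
add support for a new debrid provider  -> Added
remove unused env vars                 -> Removed
fix HDR label parsing                  -> Fixed
bump docker image tags                 -> Changed (no keyword match)
```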
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

View File

@@ -67,6 +67,30 @@ Then set any of the values you would like to customize.
By default, Knight Crawler is configured to be *relatively* conservative in its resource usage. If you are running on a decent machine (16GB RAM, i5+ or equivalent), you can raise some settings to increase consumer throughput. This is especially helpful if you have a large backlog from [importing databases](#importing-external-dumps).
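For example, two of the knobs shipped in the default [stack.env](deployment/docker/stack.env) control the qBit collector (the values below are hypothetical, not tuned recommendations):
```
QBIT_CONCURRENCY=16
QBIT_REPLICAS=1
```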
### DebridMediaManager setup (optional)
There are some optional steps you can take to maximise the number of movies/TV shows we can find.
We can search DebridMediaManager hash lists, which are hosted on GitHub. This allows us to add hundreds of thousands of movies and TV shows, but it requires a GitHub Personal Access Token. The token only needs read access, and only to public repositories. To generate one, please follow these steps:
1. Navigate to GitHub settings -> Developer Settings -> Personal access tokens -> Fine-grained tokens (click [here](https://github.com/settings/tokens?type=beta) for a direct link)
2. Press `Generate new token`
3. Fill out the form (example data below):
```
Token name:
KnightCrawler
Expiration:
90 days
Description:
<blank>
Repository access
(checked) Public Repositories (read-only)
```
4. Click `Generate token`
5. Take the new token and add it to the bottom of the [stack.env](deployment/docker/stack.env) file
```
GITHUB_PAT=<YOUR TOKEN HERE>
```
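To sanity-check the token before restarting the stack, you can query the GitHub API directly (an optional, unofficial check, assuming `GITHUB_PAT` is exported in your shell; a valid token returns HTTP 200 with an authenticated rate limit):
```
curl -si -H "Authorization: Bearer $GITHUB_PAT" https://api.github.com/rate_limit
```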
### Configure external access
Please choose which applies to you:

View File

@@ -1,112 +0,0 @@
# git-cliff ~ configuration file
# https://git-cliff.org/docs/configuration
[changelog]
# changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{%- macro remote_url() -%}
https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}
{%- endmacro -%}
{% if version -%}
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else -%}
## [Unreleased]
{% endif -%}
### Details\
{% for group, commits in commits | group_by(attribute="group") %}
#### {{ group | upper_first }}
{%- for commit in commits %}
- {{ commit.message | upper_first | trim }}\
{% if commit.github.username %} by @{{ commit.github.username }}{%- endif -%}
{% if commit.github.pr_number %} in \
[#{{ commit.github.pr_number }}]({{ self::remote_url() }}/pull/{{ commit.github.pr_number }}) \
{%- endif -%}
{% endfor %}
{% endfor %}
{%- if github.contributors | filter(attribute="is_first_time", value=true) | length != 0 %}
## New Contributors
{%- endif -%}
{% for contributor in github.contributors | filter(attribute="is_first_time", value=true) %}
* @{{ contributor.username }} made their first contribution
{%- if contributor.pr_number %} in \
[#{{ contributor.pr_number }}]({{ self::remote_url() }}/pull/{{ contributor.pr_number }}) \
{%- endif %}
{%- endfor %}\n
"""
# template for the changelog footer
footer = """
{%- macro remote_url() -%}
https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}
{%- endmacro -%}
{% for release in releases -%}
{% if release.version -%}
{% if release.previous.version -%}
[{{ release.version | trim_start_matches(pat="v") }}]: \
{{ self::remote_url() }}/compare/{{ release.previous.version }}..{{ release.version }}
{% endif -%}
{% else -%}
[unreleased]: {{ self::remote_url() }}/compare/{{ release.previous.version }}..HEAD
{% endif -%}
{% endfor %}
<!-- generated by git-cliff -->
"""
# remove the leading and trailing whitespace from the templates
trim = true
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
    # remove issue numbers from commits
    { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "" },
]
# regex for parsing and grouping commits
commit_parsers = [
    { message = "^.*: add", group = "Added" },
    { message = "^add", group = "Added" },
    { message = "^.*: support", group = "Added" },
    { message = "^support", group = "Added" },
    { message = "^.*: remove", group = "Removed" },
    { message = "^remove", group = "Removed" },
    { message = "^.*: delete", group = "Removed" },
    { message = "^delete", group = "Removed" },
    { message = "^.*: test", group = "Fixed" },
    { message = "^test", group = "Fixed" },
    { message = "^.*: fix", group = "Fixed" },
    { message = "^fix", group = "Fixed" },
    { message = "^.*", group = "Changed" },
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = true
# regex for matching git tags
tag_pattern = "v[0-9].*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"

View File

@@ -14,14 +14,12 @@ program=
[BitTorrent]
Session\AnonymousModeEnabled=true
Session\BTProtocol=TCP
Session\ConnectionSpeed=150
Session\DefaultSavePath=/downloads/
Session\ExcludedFileNames=
Session\MaxActiveCheckingTorrents=20
Session\MaxActiveDownloads=20
Session\MaxActiveCheckingTorrents=5
Session\MaxActiveDownloads=10
Session\MaxActiveTorrents=50
Session\MaxActiveUploads=50
Session\MaxConcurrentHTTPAnnounces=1000
Session\MaxConnections=2000
Session\Port=6881
Session\QueueingSystemEnabled=true

View File

@@ -94,7 +94,7 @@ services:
condition: service_healthy
env_file: stack.env
hostname: knightcrawler-addon
image: gabisonfire/knightcrawler-addon:2.0.26
image: gabisonfire/knightcrawler-addon:2.0.18
labels:
logging: promtail
networks:
@@ -117,7 +117,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-consumer:2.0.26
image: gabisonfire/knightcrawler-consumer:2.0.18
labels:
logging: promtail
networks:
@@ -138,7 +138,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-debrid-collector:2.0.26
image: gabisonfire/knightcrawler-debrid-collector:2.0.18
labels:
logging: promtail
networks:
@@ -152,7 +152,7 @@ services:
migrator:
condition: service_completed_successfully
env_file: stack.env
image: gabisonfire/knightcrawler-metadata:2.0.26
image: gabisonfire/knightcrawler-metadata:2.0.18
networks:
- knightcrawler-network
restart: "no"
@@ -163,7 +163,7 @@ services:
postgres:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-migrator:2.0.26
image: gabisonfire/knightcrawler-migrator:2.0.18
networks:
- knightcrawler-network
restart: "no"
@@ -182,7 +182,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-producer:2.0.26
image: gabisonfire/knightcrawler-producer:2.0.18
labels:
logging: promtail
networks:
@@ -207,7 +207,7 @@ services:
deploy:
replicas: ${QBIT_REPLICAS:-0}
env_file: stack.env
image: gabisonfire/knightcrawler-qbit-collector:2.0.26
image: gabisonfire/knightcrawler-qbit-collector:2.0.18
labels:
logging: promtail
networks:

View File

@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends
services:
metadata:
image: gabisonfire/knightcrawler-metadata:2.0.26
image: gabisonfire/knightcrawler-metadata:2.0.18
env_file: ../../.env
networks:
- knightcrawler-network
@@ -30,7 +30,7 @@ services:
condition: service_completed_successfully
migrator:
image: gabisonfire/knightcrawler-migrator:2.0.26
image: gabisonfire/knightcrawler-migrator:2.0.18
env_file: ../../.env
networks:
- knightcrawler-network
@@ -40,7 +40,7 @@ services:
condition: service_healthy
addon:
image: gabisonfire/knightcrawler-addon:2.0.26
image: gabisonfire/knightcrawler-addon:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
hostname: knightcrawler-addon
@@ -48,22 +48,22 @@ services:
- "7000:7000"
consumer:
image: gabisonfire/knightcrawler-consumer:2.0.26
image: gabisonfire/knightcrawler-consumer:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
debridcollector:
image: gabisonfire/knightcrawler-debrid-collector:2.0.26
image: gabisonfire/knightcrawler-debrid-collector:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
producer:
image: gabisonfire/knightcrawler-producer:2.0.26
image: gabisonfire/knightcrawler-producer:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
qbitcollector:
image: gabisonfire/knightcrawler-qbit-collector:2.0.26
image: gabisonfire/knightcrawler-qbit-collector:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
depends_on:

View File

@@ -32,10 +32,12 @@ COLLECTOR_DEBRID_ENABLED=true
COLLECTOR_REAL_DEBRID_API_KEY=
QBIT_HOST=http://qbittorrent:8080
QBIT_TRACKERS_URL=https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_all_http.txt
QBIT_CONCURRENCY=8
# Number of replicas for the qBittorrent collector and qBittorrent client. Should be 0 or 1.
QBIT_REPLICAS=0
# Addon
DEBUG_MODE=false
# Producer
GITHUB_PAT=

View File

@@ -12,7 +12,7 @@
"@redis/client": "^1.5.14",
"@redis/json": "^1.0.6",
"@redis/search": "^1.1.6",
"all-debrid-api": "^1.2.0",
"all-debrid-api": "^1.1.0",
"axios": "^1.6.1",
"bottleneck": "^2.19.5",
"cache-manager": "^3.4.4",

View File

@@ -10,7 +10,7 @@
},
"dependencies": {
"@putdotio/api-client": "^8.42.0",
"all-debrid-api": "^1.2.0",
"all-debrid-api": "^1.1.0",
"axios": "^1.6.1",
"bottleneck": "^2.19.5",
"cache-manager": "^3.4.4",

View File

@@ -3,7 +3,6 @@ import { addonBuilder } from 'stremio-addon-sdk';
import { cacheWrapStream } from './lib/cache.js';
import { dummyManifest } from './lib/manifest.js';
import * as repository from './lib/repository.js';
import applyFilters from "./lib/filter.js";
import applySorting from './lib/sort.js';
import { toStreamInfo, applyStaticInfo } from './lib/streamInfo.js';
import { Type } from './lib/types.js';
@@ -33,7 +32,6 @@ builder.defineStreamHandler((args) => {
.then(records => records
.sort((a, b) => b.torrent.seeders - a.torrent.seeders || b.torrent.uploadDate - a.torrent.uploadDate)
.map(record => toStreamInfo(record)))))
.then(streams => applyFilters(streams, args.extra))
.then(streams => applySorting(streams, args.extra))
.then(streams => applyStaticInfo(streams))
.then(streams => applyMochs(streams, args.extra))

View File

@@ -84,7 +84,7 @@ export function getImdbIdMovieEntries(imdbId) {
where: {
imdbId: { [Op.eq]: imdbId }
},
include: { model: Torrent, required: true },
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
@@ -99,7 +99,7 @@ export function getImdbIdSeriesEntries(imdbId, season, episode) {
imdbSeason: { [Op.eq]: season },
imdbEpisode: { [Op.eq]: episode }
},
include: { model: Torrent, required: true },
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
@@ -112,7 +112,7 @@ export function getKitsuIdMovieEntries(kitsuId) {
where: {
kitsuId: { [Op.eq]: kitsuId }
},
include: { model: Torrent, required: true },
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']
@@ -126,7 +126,7 @@ export function getKitsuIdSeriesEntries(kitsuId, episode) {
kitsuId: { [Op.eq]: kitsuId },
kitsuEpisode: { [Op.eq]: episode }
},
include: { model: Torrent, required: true },
include: [Torrent],
limit: 500,
order: [
[Torrent, 'size', 'DESC']

View File

@@ -20,7 +20,7 @@ export function toStreamInfo(record) {
const title = joinDetailParts(
[
joinDetailParts([record.torrent.title.replace(/[, ]+/g, ' ')]),
joinDetailParts([record.title || undefined]),
joinDetailParts([!sameInfo && record.title || undefined]),
joinDetailParts([
joinDetailParts([formatSize(record.size)], '💾 ')
]),

View File

@@ -9,187 +9,187 @@ const KEY = 'alldebrid';
const AGENT = 'knightcrawler';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
const hashes = streams.map(stream => stream.infoHash);
const available = await AD.magnet.instant(hashes)
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn(`Failed AllDebrid cached [${hashes[0]}] torrent availability request:`, error);
return undefined;
});
return available?.data?.magnets && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available.data.magnets.find(magnet => stream.infoHash === magnet.hash.toLowerCase());
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[stream.infoHash] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: cachedEntry?.instant
}
return mochStreams;
}, {})
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
const hashes = streams.map(stream => stream.infoHash);
const available = await AD.magnet.instant(hashes)
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn(`Failed AllDebrid cached [${hashes[0]}] torrent availability request:`, error);
return undefined;
});
return available?.data?.magnets && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available.data.magnets.find(magnet => stream.infoHash === magnet.hash.toLowerCase());
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: cachedEntry?.instant
}
return mochStreams;
}, {})
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
return AD.magnet.status()
.then(response => response.data.magnets)
.then(torrents => (torrents || [])
.filter(torrent => torrent && statusReady(torrent.statusCode))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename
})));
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
return AD.magnet.status()
.then(response => response.data.magnets)
.then(torrents => (torrents || [])
.filter(torrent => torrent && statusReady(torrent.statusCode))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename
})));
}
export async function getItemMeta(itemId, apiKey) {
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
return AD.magnet.status(itemId)
.then(response => response.data.magnets)
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename,
infoHash: torrent.hash.toLowerCase(),
videos: torrent.links
.filter(file => isVideo(file.filename))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${index}`,
title: file.filename,
released: new Date(torrent.uploadDate * 1000 - index).toISOString(),
streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/${encodeURIComponent(file.filename)}/${index}` }]
}))
}))
const options = await getDefaultOptions();
const AD = new AllDebridClient(apiKey, options);
return AD.magnet.status(itemId)
.then(response => response.data.magnets)
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename,
infoHash: torrent.hash.toLowerCase(),
videos: torrent.links
.filter(file => isVideo(file.filename))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${index}`,
title: file.filename,
released: new Date(torrent.uploadDate * 1000 - index).toISOString(),
streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/${encodeURIComponent(file.filename)}/${index}` }]
}))
}))
}
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting AllDebrid ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const AD = new AllDebridClient(apiKey, options);
console.log(`Unrestricting AllDebrid ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const AD = new AllDebridClient(apiKey, options);
return _resolve(AD, infoHash, cachedEntryInfo, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to AllDebrid ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
} else if (error.code === 'MAGNET_TOO_MANY') {
console.log(`Deleting and retrying adding to AllDebrid ${infoHash} [${fileIndex}]...`);
return _deleteAndRetry(AD, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(error)}`);
});
return _resolve(AD, infoHash, cachedEntryInfo, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to AllDebrid ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
} else if (error.code === 'MAGNET_TOO_MANY') {
console.log(`Deleting and retrying adding to AllDebrid ${infoHash} [${fileIndex}]...`);
return _deleteAndRetry(AD, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(AD, infoHash, cachedEntryInfo, fileIndex) {
const torrent = await _createOrFindTorrent(AD, infoHash);
if (torrent && statusReady(torrent.statusCode)) {
return _unrestrictLink(AD, torrent, cachedEntryInfo, fileIndex);
} else if (torrent && statusDownloading(torrent.statusCode)) {
console.log(`Downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusHandledError(torrent.statusCode)) {
console.log(`Retrying downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(AD, infoHash, cachedEntryInfo, fileIndex);
}
const torrent = await _createOrFindTorrent(AD, infoHash);
if (torrent && statusReady(torrent.statusCode)) {
return _unrestrictLink(AD, torrent, cachedEntryInfo, fileIndex);
} else if (torrent && statusDownloading(torrent.statusCode)) {
console.log(`Downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusHandledError(torrent.statusCode)) {
console.log(`Retrying downloading to AllDebrid ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(AD, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(torrent)}`);
return Promise.reject(`Failed AllDebrid adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrent(AD, infoHash) {
return _findTorrent(AD, infoHash)
.catch(() => _createTorrent(AD, infoHash));
return _findTorrent(AD, infoHash)
.catch(() => _createTorrent(AD, infoHash));
}
async function _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex) {
const newTorrent = await _createTorrent(AD, infoHash);
return newTorrent && statusReady(newTorrent.statusCode)
? _unrestrictLink(AD, newTorrent, encodedFileName, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
const newTorrent = await _createTorrent(AD, infoHash);
return newTorrent && statusReady(newTorrent.statusCode)
? _unrestrictLink(AD, newTorrent, encodedFileName, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _deleteAndRetry(AD, infoHash, encodedFileName, fileIndex) {
const torrents = await AD.magnet.status().then(response => response.data.magnets);
const lastTorrent = torrents[torrents.length - 1];
return AD.magnet.delete(lastTorrent.id)
.then(() => _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex));
const torrents = await AD.magnet.status().then(response => response.data.magnets);
const lastTorrent = torrents[torrents.length - 1];
return AD.magnet.delete(lastTorrent.id)
.then(() => _retryCreateTorrent(AD, infoHash, encodedFileName, fileIndex));
}
async function _findTorrent(AD, infoHash) {
const torrents = await AD.magnet.status().then(response => response.data.magnets);
const foundTorrents = torrents.filter(torrent => torrent.hash.toLowerCase() === infoHash);
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
return foundTorrent || Promise.reject('No recent torrent found');
const torrents = await AD.magnet.status().then(response => response.data.magnets);
const foundTorrents = torrents.filter(torrent => torrent.hash.toLowerCase() === infoHash);
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
return foundTorrent || Promise.reject('No recent torrent found');
}
async function _createTorrent(AD, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
const uploadResponse = await AD.magnet.upload(magnetLink);
const torrentId = uploadResponse.data.magnets[0].id;
return AD.magnet.status(torrentId).then(statusResponse => statusResponse.data.magnets);
const magnetLink = await getMagnetLink(infoHash);
const uploadResponse = await AD.magnet.upload(magnetLink);
const torrentId = uploadResponse.data.magnets[0].id;
return AD.magnet.status(torrentId).then(statusResponse => statusResponse.data.magnets);
}
async function _unrestrictLink(AD, torrent, encodedFileName, fileIndex) {
const targetFileName = decodeURIComponent(encodedFileName);
const videos = torrent.links.filter(link => isVideo(link.filename));
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.filename))
: videos.sort((a, b) => b.size - a.size)[0];
const targetFileName = decodeURIComponent(encodedFileName);
const videos = torrent.links.filter(link => isVideo(link.filename)).sort((a, b) => b.size - a.size);
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.filename))
: videos[0];
if (!targetVideo && torrent.links.every(link => isArchive(link.filename))) {
console.log(`Only AllDebrid archive is available for [${torrent.hash}] ${encodedFileName}`)
return StaticResponse.FAILED_RAR;
}
if (!targetVideo || !targetVideo.link || !targetVideo.link.length) {
return Promise.reject(`No AllDebrid links found for [${torrent.hash}] ${encodedFileName}`);
}
const unrestrictedLink = await AD.link.unlock(targetVideo.link).then(response => response.data.link);
console.log(`Unrestricted AllDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
return unrestrictedLink;
if (!targetVideo && torrent.links.every(link => isArchive(link.filename))) {
console.log(`Only AllDebrid archive is available for [${torrent.hash}] ${encodedFileName}`)
return StaticResponse.FAILED_RAR;
}
if (!targetVideo || !targetVideo.link || !targetVideo.link.length) {
return Promise.reject(`No AllDebrid links found for [${torrent.hash}] ${encodedFileName}`);
}
const unrestrictedLink = await AD.link.unlock(targetVideo.link).then(response => response.data.link);
console.log(`Unrestricted AllDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
return unrestrictedLink;
}
async function getDefaultOptions(ip) {
return { base_agent: AGENT, timeout: 10000 };
return { base_agent: AGENT, timeout: 10000 };
}
export function toCommonError(error) {
if (error && error.code === 'AUTH_BAD_APIKEY') {
return BadTokenError;
}
if (error && error.code === 'AUTH_USER_BANNED') {
return AccessDeniedError;
}
return undefined;
if (error && error.code === 'AUTH_BAD_APIKEY') {
return BadTokenError;
}
if (error && error.code === 'AUTH_USER_BANNED') {
return AccessDeniedError;
}
return undefined;
}
function statusError(statusCode) {
return [5, 6, 7, 8, 9, 10, 11].includes(statusCode);
return [5, 6, 7, 8, 9, 10, 11].includes(statusCode);
}
function statusHandledError(statusCode) {
return [5, 7, 9, 10, 11].includes(statusCode);
return [5, 7, 9, 10, 11].includes(statusCode);
}
function statusDownloading(statusCode) {
return [0, 1, 2, 3].includes(statusCode);
return [0, 1, 2, 3].includes(statusCode);
}
function statusReady(statusCode) {
return statusCode === 4;
return statusCode === 4;
}
function errorExpiredSubscriptionError(error) {
return ['AUTH_BAD_APIKEY', 'MUST_BE_PREMIUM', 'MAGNET_MUST_BE_PREMIUM', 'FREE_TRIAL_LIMIT_REACHED', 'AUTH_USER_BANNED']
.includes(error.code);
return ['AUTH_BAD_APIKEY', 'MUST_BE_PREMIUM', 'MAGNET_MUST_BE_PREMIUM', 'FREE_TRIAL_LIMIT_REACHED', 'AUTH_USER_BANNED']
.includes(error.code);
}

View File

@@ -8,148 +8,148 @@ import StaticResponse from './static.js';
const KEY = 'debridlink';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const DL = new DebridLinkClient(apiKey, options);
const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 50)
.map(batch => batch.join(','));
const available = await Promise.all(hashBatches.map(hashes => DL.seedbox.cached(hashes)))
.then(results => results.map(result => result.value))
.then(results => results.reduce((all, result) => Object.assign(all, result), {}))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed DebridLink cached torrent availability request:', error);
return undefined;
});
return available && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
mochStreams[stream.infoHash] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedEntry
};
return mochStreams;
}, {})
const options = await getDefaultOptions();
const DL = new DebridLinkClient(apiKey, options);
const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 50)
.map(batch => batch.join(','));
const available = await Promise.all(hashBatches.map(hashes => DL.seedbox.cached(hashes)))
.then(results => results.map(result => result.value))
.then(results => results.reduce((all, result) => Object.assign(all, result), {}))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed DebridLink cached torrent availability request:', error);
return undefined;
});
return available && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedEntry
};
return mochStreams;
}, {})
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const DL = new DebridLinkClient(apiKey, options);
return DL.seedbox.list()
.then(response => response.value)
.then(torrents => (torrents || [])
.filter(torrent => torrent && statusReady(torrent))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name
})));
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const DL = new DebridLinkClient(apiKey, options);
return DL.seedbox.list()
.then(response => response.value)
.then(torrents => (torrents || [])
.filter(torrent => torrent && statusReady(torrent))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name
})));
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions(ip);
const DL = new DebridLinkClient(apiKey, options);
return DL.seedbox.list(itemId)
.then(response => response.value[0])
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name,
infoHash: torrent.hashString.toLowerCase(),
videos: torrent.files
.filter(file => isVideo(file.name))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${index}`,
title: file.name,
released: new Date(torrent.created * 1000 - index).toISOString(),
streams: [{ url: file.downloadUrl }]
}))
}))
const options = await getDefaultOptions(ip);
const DL = new DebridLinkClient(apiKey, options);
return DL.seedbox.list(itemId)
.then(response => response.value[0])
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name,
infoHash: torrent.hashString.toLowerCase(),
videos: torrent.files
.filter(file => isVideo(file.name))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${index}`,
title: file.name,
released: new Date(torrent.created * 1000 - index).toISOString(),
streams: [{ url: file.downloadUrl }]
}))
}))
}
export async function resolve({ ip, apiKey, infoHash, fileIndex }) {
console.log(`Unrestricting DebridLink ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const DL = new DebridLinkClient(apiKey, options);
console.log(`Unrestricting DebridLink ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const DL = new DebridLinkClient(apiKey, options);
return _resolve(DL, infoHash, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to DebridLink ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(error)}`);
});
return _resolve(DL, infoHash, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to DebridLink ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(DL, infoHash, fileIndex) {
const torrent = await _createOrFindTorrent(DL, infoHash);
if (torrent && statusReady(torrent)) {
return _unrestrictLink(DL, torrent, fileIndex);
} else if (torrent && statusDownloading(torrent)) {
console.log(`Downloading to DebridLink ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
}
const torrent = await _createOrFindTorrent(DL, infoHash);
if (torrent && statusReady(torrent)) {
return _unrestrictLink(DL, torrent, fileIndex);
} else if (torrent && statusDownloading(torrent)) {
console.log(`Downloading to DebridLink ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
}
return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(torrent)}`);
return Promise.reject(`Failed DebridLink adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrent(DL, infoHash) {
return _findTorrent(DL, infoHash)
.catch(() => _createTorrent(DL, infoHash));
return _findTorrent(DL, infoHash)
.catch(() => _createTorrent(DL, infoHash));
}
async function _findTorrent(DL, infoHash) {
const torrents = await DL.seedbox.list().then(response => response.value);
const foundTorrents = torrents.filter(torrent => torrent.hashString.toLowerCase() === infoHash);
return foundTorrents[0] || Promise.reject('No recent torrent found');
const torrents = await DL.seedbox.list().then(response => response.value);
const foundTorrents = torrents.filter(torrent => torrent.hashString.toLowerCase() === infoHash);
return foundTorrents[0] || Promise.reject('No recent torrent found');
}
async function _createTorrent(DL, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
const uploadResponse = await DL.seedbox.add(magnetLink, null, true);
return uploadResponse.value;
const magnetLink = await getMagnetLink(infoHash);
const uploadResponse = await DL.seedbox.add(magnetLink, null, true);
return uploadResponse.value;
}
async function _unrestrictLink(DL, torrent, fileIndex) {
const targetFile = Number.isInteger(fileIndex)
? torrent.files[fileIndex]
: torrent.files.filter(file => file.downloadPercent === 100).sort((a, b) => b.size - a.size)[0];
const targetFile = Number.isInteger(fileIndex)
? torrent.files[fileIndex]
: torrent.files.filter(file => file.downloadPercent === 100).sort((a, b) => b.size - a.size)[0];
if (!targetFile && torrent.files.every(file => isArchive(file.downloadUrl))) {
console.log(`Only DebridLink archive is available for [${torrent.hashString}] ${fileIndex}`)
return StaticResponse.FAILED_RAR;
}
if (!targetFile || !targetFile.downloadUrl) {
return Promise.reject(`No DebridLink links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
}
console.log(`Unrestricted DebridLink ${torrent.hashString} [${fileIndex}] to ${targetFile.downloadUrl}`);
return targetFile.downloadUrl;
if (!targetFile && torrent.files.every(file => isArchive(file.downloadUrl))) {
console.log(`Only DebridLink archive is available for [${torrent.hashString}] ${fileIndex}`)
return StaticResponse.FAILED_RAR;
}
if (!targetFile || !targetFile.downloadUrl) {
return Promise.reject(`No DebridLink links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
}
console.log(`Unrestricted DebridLink ${torrent.hashString} [${fileIndex}] to ${targetFile.downloadUrl}`);
return targetFile.downloadUrl;
}
async function getDefaultOptions(ip) {
return { ip, timeout: 10000 };
return { ip, timeout: 10000 };
}
export function toCommonError(error) {
if (error === 'badToken') {
return BadTokenError;
}
return undefined;
if (error === 'badToken') {
return BadTokenError;
}
return undefined;
}
function statusDownloading(torrent) {
return torrent.downloadPercent < 100
return torrent.downloadPercent < 100
}
function statusReady(torrent) {
return torrent.downloadPercent === 100;
return torrent.downloadPercent === 100;
}
function errorExpiredSubscriptionError(error) {
return ['freeServerOverload', 'maxTorrent', 'maxLink', 'maxLinkHost', 'maxData', 'maxDataHost'].includes(error);
return ['freeServerOverload', 'maxTorrent', 'maxLink', 'maxLinkHost', 'maxData', 'maxDataHost'].includes(error);
}

View File

@@ -15,226 +15,226 @@ const RESOLVE_TIMEOUT = 2 * 60 * 1000; // 2 minutes
const MIN_API_KEY_SYMBOLS = 15;
const TOKEN_BLACKLIST = [];
export const MochOptions = {
realdebrid: {
key: 'realdebrid',
instance: realdebrid,
name: "RealDebrid",
shortName: 'RD',
catalog: true
},
premiumize: {
key: 'premiumize',
instance: premiumize,
name: 'Premiumize',
shortName: 'PM',
catalog: true
},
alldebrid: {
key: 'alldebrid',
instance: alldebrid,
name: 'AllDebrid',
shortName: 'AD',
catalog: true
},
debridlink: {
key: 'debridlink',
instance: debridlink,
name: 'DebridLink',
shortName: 'DL',
catalog: true
},
offcloud: {
key: 'offcloud',
instance: offcloud,
name: 'Offcloud',
shortName: 'OC',
catalog: true
},
putio: {
key: 'putio',
instance: putio,
name: 'Put.io',
shortName: 'Putio',
catalog: true
}
realdebrid: {
key: 'realdebrid',
instance: realdebrid,
name: "RealDebrid",
shortName: 'RD',
catalog: true
},
premiumize: {
key: 'premiumize',
instance: premiumize,
name: 'Premiumize',
shortName: 'PM',
catalog: true
},
alldebrid: {
key: 'alldebrid',
instance: alldebrid,
name: 'AllDebrid',
shortName: 'AD',
catalog: true
},
debridlink: {
key: 'debridlink',
instance: debridlink,
name: 'DebridLink',
shortName: 'DL',
catalog: true
},
offcloud: {
key: 'offcloud',
instance: offcloud,
name: 'Offcloud',
shortName: 'OC',
catalog: true
},
putio: {
key: 'putio',
instance: putio,
name: 'Put.io',
shortName: 'Putio',
catalog: true
}
};
const unrestrictQueues = {}
Object.values(MochOptions)
.map(moch => moch.key)
.forEach(mochKey => unrestrictQueues[mochKey] = new namedQueue((task, callback) => task.method()
.then(result => callback(false, result))
.catch((error => callback(error))), 200));
.then(result => callback(false, result))
.catch((error => callback(error))), 200));
export function hasMochConfigured(config) {
return Object.keys(MochOptions).find(moch => config?.[moch])
return Object.keys(MochOptions).find(moch => config?.[moch])
}
export async function applyMochs(streams, config) {
if (!streams?.length || !hasMochConfigured(config)) {
return streams;
}
return Promise.all(Object.keys(config)
.filter(configKey => MochOptions[configKey])
.map(configKey => MochOptions[configKey])
.map(moch => {
if (isInvalidToken(config[moch.key], moch.key)) {
return { moch, error: BadTokenError };
}
return moch.instance.getCachedStreams(streams, config[moch.key])
.then(mochStreams => ({ moch, mochStreams }))
.catch(rawError => {
const error = moch.instance.toCommonError(rawError) || rawError;
if (error === BadTokenError) {
blackListToken(config[moch.key], moch.key);
}
return { moch, error };
})
}))
.then(results => processMochResults(streams, config, results));
if (!streams?.length || !hasMochConfigured(config)) {
return streams;
}
return Promise.all(Object.keys(config)
.filter(configKey => MochOptions[configKey])
.map(configKey => MochOptions[configKey])
.map(moch => {
if (isInvalidToken(config[moch.key], moch.key)) {
return { moch, error: BadTokenError };
}
return moch.instance.getCachedStreams(streams, config[moch.key])
.then(mochStreams => ({ moch, mochStreams }))
.catch(rawError => {
const error = moch.instance.toCommonError(rawError) || rawError;
if (error === BadTokenError) {
blackListToken(config[moch.key], moch.key);
}
return { moch, error };
})
}))
.then(results => processMochResults(streams, config, results));
}
export async function resolve(parameters) {
const moch = MochOptions[parameters.mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${parameters.mochKey}`));
}
const moch = MochOptions[parameters.mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${parameters.mochKey}`));
}
if (!parameters.apiKey || !parameters.infoHash || !parameters.cachedEntryInfo) {
return Promise.reject(new Error("No valid parameters passed"));
}
const id = `${parameters.ip}_${parameters.mochKey}_${parameters.apiKey}_${parameters.infoHash}_${parameters.fileIndex}`;
const method = () => timeout(RESOLVE_TIMEOUT, cacheWrapResolvedUrl(id, () => moch.instance.resolve(parameters)))
.catch(error => {
console.warn(error);
return StaticResponse.FAILED_UNEXPECTED;
})
.then(url => isStaticUrl(url) ? `${parameters.host}/${url}` : url);
const unrestrictQueue = unrestrictQueues[moch.key];
return new Promise(((resolve, reject) => {
unrestrictQueue.push({ id, method }, (error, result) => result ? resolve(result) : reject(error));
}));
if (!parameters.apiKey || !parameters.infoHash || !parameters.cachedEntryInfo) {
return Promise.reject(new Error("No valid parameters passed"));
}
const id = `${parameters.ip}_${parameters.mochKey}_${parameters.apiKey}_${parameters.infoHash}_${parameters.fileIndex}`;
const method = () => timeout(RESOLVE_TIMEOUT, cacheWrapResolvedUrl(id, () => moch.instance.resolve(parameters)))
.catch(error => {
console.warn(error);
return StaticResponse.FAILED_UNEXPECTED;
})
.then(url => isStaticUrl(url) ? `${parameters.host}/${url}` : url);
const unrestrictQueue = unrestrictQueues[moch.key];
return new Promise(((resolve, reject) => {
unrestrictQueue.push({ id, method }, (error, result) => result ? resolve(result) : reject(error));
}));
}
export async function getMochCatalog(mochKey, config) {
const moch = MochOptions[mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
}
if (isInvalidToken(config[mochKey], mochKey)) {
return Promise.reject(new Error(`Invalid API key for moch provider: ${mochKey}`));
}
return moch.instance.getCatalog(config[moch.key], config.skip, config.ip)
.catch(rawError => {
const commonError = moch.instance.toCommonError(rawError);
if (commonError === BadTokenError) {
blackListToken(config[moch.key], moch.key);
}
return commonError ? [] : Promise.reject(rawError);
});
const moch = MochOptions[mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
}
if (isInvalidToken(config[mochKey], mochKey)) {
return Promise.reject(new Error(`Invalid API key for moch provider: ${mochKey}`));
}
return moch.instance.getCatalog(config[moch.key], config.skip, config.ip)
.catch(rawError => {
const commonError = moch.instance.toCommonError(rawError);
if (commonError === BadTokenError) {
blackListToken(config[moch.key], moch.key);
}
return commonError ? [] : Promise.reject(rawError);
});
}
export async function getMochItemMeta(mochKey, itemId, config) {
const moch = MochOptions[mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
}
const moch = MochOptions[mochKey];
if (!moch) {
return Promise.reject(new Error(`Not a valid moch provider: ${mochKey}`));
}
return moch.instance.getItemMeta(itemId, config[moch.key], config.ip)
.then(meta => enrichMeta(meta))
.then(meta => {
meta.videos.forEach(video => video.streams.forEach(stream => {
if (!stream.url.startsWith('http')) {
stream.url = `${config.host}/${moch.key}/${stream.url}/${streamFilename(video)}`
}
stream.behaviorHints = { bingeGroup: itemId }
}))
return meta;
});
return moch.instance.getItemMeta(itemId, config[moch.key], config.ip)
.then(meta => enrichMeta(meta))
.then(meta => {
meta.videos.forEach(video => video.streams.forEach(stream => {
if (!stream.url.startsWith('http')) {
stream.url = `${config.host}/${moch.key}/${stream.url}/${streamFilename(video)}`
}
stream.behaviorHints = { bingeGroup: itemId }
}))
return meta;
});
}
function processMochResults(streams, config, results) {
const errorResults = results
.map(result => errorStreamResponse(result.moch.key, result.error, config))
.filter(errorResponse => errorResponse);
if (errorResults.length) {
return errorResults;
}
const errorResults = results
.map(result => errorStreamResponse(result.moch.key, result.error, config))
.filter(errorResponse => errorResponse);
if (errorResults.length) {
return errorResults;
}
const includeTorrentLinks = options.includeTorrentLinks(config);
const excludeDownloadLinks = options.excludeDownloadLinks(config);
const mochResults = results.filter(result => result?.mochStreams);
const includeTorrentLinks = options.includeTorrentLinks(config);
const excludeDownloadLinks = options.excludeDownloadLinks(config);
const mochResults = results.filter(result => result?.mochStreams);
const cachedStreams = mochResults
.reduce((resultStreams, mochResult) => populateCachedLinks(resultStreams, mochResult, config), streams);
const resultStreams = excludeDownloadLinks ? cachedStreams : populateDownloadLinks(cachedStreams, mochResults, config);
return includeTorrentLinks ? resultStreams : resultStreams.filter(stream => stream.url);
const cachedStreams = mochResults
.reduce((resultStreams, mochResult) => populateCachedLinks(resultStreams, mochResult, config), streams);
const resultStreams = excludeDownloadLinks ? cachedStreams : populateDownloadLinks(cachedStreams, mochResults, config);
return includeTorrentLinks ? resultStreams : resultStreams.filter(stream => stream.url);
}
function populateCachedLinks(streams, mochResult, config) {
return streams.map(stream => {
const cachedEntry = stream.infoHash && mochResult.mochStreams[stream.infoHash];
if (cachedEntry?.cached) {
return {
name: `[${mochResult.moch.shortName}+] ${stream.name}`,
title: stream.title,
url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
behaviorHints: stream.behaviorHints
};
}
return stream;
});
return streams.map(stream => {
const cachedEntry = stream.infoHash && mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
if (cachedEntry?.cached) {
return {
name: `[${mochResult.moch.shortName}+] ${stream.name}`,
title: stream.title,
url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
behaviorHints: stream.behaviorHints
};
}
return stream;
});
}
function populateDownloadLinks(streams, mochResults, config) {
const torrentStreams = streams.filter(stream => stream.infoHash);
const seededStreams = streams.filter(stream => !stream.title.includes('👤 0'));
torrentStreams.forEach(stream => mochResults.forEach(mochResult => {
const cachedEntry = mochResult.mochStreams[stream.infoHash];
const isCached = cachedEntry?.cached;
if (!isCached && isHealthyStreamForDebrid(seededStreams, stream)) {
streams.push({
name: `[${mochResult.moch.shortName} download] ${stream.name}`,
title: stream.title,
url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
behaviorHints: stream.behaviorHints
})
}
}));
return streams;
const torrentStreams = streams.filter(stream => stream.infoHash);
const seededStreams = streams.filter(stream => !stream.title.includes('👤 0'));
torrentStreams.forEach(stream => mochResults.forEach(mochResult => {
const cachedEntry = mochResult.mochStreams[`${stream.infoHash}@${stream.fileIdx}`];
const isCached = cachedEntry?.cached;
if (!isCached && isHealthyStreamForDebrid(seededStreams, stream)) {
streams.push({
name: `[${mochResult.moch.shortName} download] ${stream.name}`,
title: stream.title,
url: `${config.host}/${mochResult.moch.key}/${cachedEntry.url}/${streamFilename(stream)}`,
behaviorHints: stream.behaviorHints
})
}
}));
return streams;
}
function isHealthyStreamForDebrid(streams, stream) {
const isZeroSeeders = stream.title.includes('👤 0');
const is4kStream = stream.name.includes('4k');
const isNotEnoughOptions = streams.length <= 5;
return !isZeroSeeders || is4kStream || isNotEnoughOptions;
const isZeroSeeders = stream.title.includes('👤 0');
const is4kStream = stream.name.includes('4k');
const isNotEnoughOptions = streams.length <= 5;
return !isZeroSeeders || is4kStream || isNotEnoughOptions;
}
function isInvalidToken(token, mochKey) {
return token.length < MIN_API_KEY_SYMBOLS || TOKEN_BLACKLIST.includes(`${mochKey}|${token}`);
return token.length < MIN_API_KEY_SYMBOLS || TOKEN_BLACKLIST.includes(`${mochKey}|${token}`);
}
function blackListToken(token, mochKey) {
const tokenKey = `${mochKey}|${token}`;
console.log(`Blacklisting invalid token: ${tokenKey}`)
TOKEN_BLACKLIST.push(tokenKey);
const tokenKey = `${mochKey}|${token}`;
console.log(`Blacklisting invalid token: ${tokenKey}`)
TOKEN_BLACKLIST.push(tokenKey);
}
function errorStreamResponse(mochKey, error, config) {
if (error === BadTokenError) {
return {
name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
title: `Invalid ${MochOptions[mochKey].name} ApiKey/Token!`,
url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
};
}
if (error === AccessDeniedError) {
return {
name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
title: `Expired/invalid ${MochOptions[mochKey].name} subscription!`,
url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
};
}
return undefined;
if (error === BadTokenError) {
return {
name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
title: `Invalid ${MochOptions[mochKey].name} ApiKey/Token!`,
url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
};
}
if (error === AccessDeniedError) {
return {
name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
title: `Expired/invalid ${MochOptions[mochKey].name} subscription!`,
url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
};
}
return undefined;
}

View File

@@ -1,63 +1,63 @@
import * as repository from '../lib/repository.js';
import * as repository from '../lib/repository.js';
const METAHUB_URL = 'https://images.metahub.space'
export const BadTokenError = { code: 'BAD_TOKEN' }
export const AccessDeniedError = { code: 'ACCESS_DENIED' }
export function chunkArray(arr, size) {
return arr.length > size
? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
: [arr];
return arr.length > size
? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
: [arr];
}
export function streamFilename(stream) {
const titleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const filename = titleParts.pop().split('/').pop();
return encodeURIComponent(filename)
const titleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const filename = titleParts.pop().split('/').pop();
return encodeURIComponent(filename)
}
export async function enrichMeta(itemMeta) {
const infoHashes = [...new Set([itemMeta.infoHash]
.concat(itemMeta.videos.map(video => video.infoHash))
.filter(infoHash => infoHash))];
const files = infoHashes.length ? await repository.getFiles(infoHashes).catch(() => []) : [];
const commonImdbId = itemMeta.infoHash && mostCommonValue(files.map(file => file.imdbId));
if (files.length) {
return {
...itemMeta,
logo: commonImdbId && `${METAHUB_URL}/logo/medium/${commonImdbId}/img`,
poster: commonImdbId && `${METAHUB_URL}/poster/medium/${commonImdbId}/img`,
background: commonImdbId && `${METAHUB_URL}/background/medium/${commonImdbId}/img`,
videos: itemMeta.videos.map(video => {
const file = files.find(file => sameFilename(video.title, file.title));
if (file?.imdbId) {
if (file.imdbSeason && file.imdbEpisode) {
video.id = `${file.imdbId}:${file.imdbSeason}:${file.imdbEpisode}`;
video.season = file.imdbSeason;
video.episode = file.imdbEpisode;
video.thumbnail = `https://episodes.metahub.space/${file.imdbId}/${video.season}/${video.episode}/w780.jpg`
} else {
video.id = file.imdbId;
video.thumbnail = `${METAHUB_URL}/background/small/${file.imdbId}/img`;
}
const infoHashes = [...new Set([itemMeta.infoHash]
.concat(itemMeta.videos.map(video => video.infoHash))
.filter(infoHash => infoHash))];
const files = infoHashes.length ? await repository.getFiles(infoHashes).catch(() => []) : [];
const commonImdbId = itemMeta.infoHash && mostCommonValue(files.map(file => file.imdbId));
if (files.length) {
return {
...itemMeta,
logo: commonImdbId && `${METAHUB_URL}/logo/medium/${commonImdbId}/img`,
poster: commonImdbId && `${METAHUB_URL}/poster/medium/${commonImdbId}/img`,
background: commonImdbId && `${METAHUB_URL}/background/medium/${commonImdbId}/img`,
videos: itemMeta.videos.map(video => {
const file = files.find(file => sameFilename(video.title, file.title));
if (file?.imdbId) {
if (file.imdbSeason && file.imdbEpisode) {
video.id = `${file.imdbId}:${file.imdbSeason}:${file.imdbEpisode}`;
video.season = file.imdbSeason;
video.episode = file.imdbEpisode;
video.thumbnail = `https://episodes.metahub.space/${file.imdbId}/${video.season}/${video.episode}/w780.jpg`
} else {
video.id = file.imdbId;
video.thumbnail = `${METAHUB_URL}/background/small/${file.imdbId}/img`;
}
}
return video;
})
}
return video;
})
}
}
return itemMeta
return itemMeta
}
export function sameFilename(filename, expectedFilename) {
const offset = filename.length - expectedFilename.length;
for (let i = 0; i < expectedFilename.length; i++) {
if (filename[offset + i] !== expectedFilename[i] && expectedFilename[i] !== '�') {
return false;
const offset = filename.length - expectedFilename.length;
for (let i = 0; i < expectedFilename.length; i++) {
if (filename[offset + i] !== expectedFilename[i] && expectedFilename[i] !== '�') {
return false;
}
}
}
return true;
return true;
}
function mostCommonValue(array) {
return array.sort((a, b) => array.filter(v => v === a).length - array.filter(v => v === b).length).pop();
return array.sort((a, b) => array.filter(v => v === a).length - array.filter(v => v === b).length).pop();
}

View File

@@ -9,178 +9,178 @@ import StaticResponse from './static.js';
const KEY = 'offcloud';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const OC = new OffcloudClient(apiKey, options);
const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 100);
const available = await Promise.all(hashBatches.map(hashes => OC.instant.cache(hashes)))
.then(results => results.map(result => result.cachedItems))
.then(results => results.reduce((all, result) => all.concat(result), []))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed Offcloud cached torrent availability request:', error);
return undefined;
});
return available && streams
.reduce((mochStreams, stream) => {
const isCached = available.includes(stream.infoHash);
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[stream.infoHash] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: isCached
};
return mochStreams;
}, {})
const options = await getDefaultOptions();
const OC = new OffcloudClient(apiKey, options);
const hashBatches = chunkArray(streams.map(stream => stream.infoHash), 100);
const available = await Promise.all(hashBatches.map(hashes => OC.instant.cache(hashes)))
.then(results => results.map(result => result.cachedItems))
.then(results => results.reduce((all, result) => all.concat(result), []))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed Offcloud cached torrent availability request:', error);
return undefined;
});
return available && streams
.reduce((mochStreams, stream) => {
const isCached = available.includes(stream.infoHash);
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const encodedFileName = encodeURIComponent(fileName);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${stream.fileIdx}`,
cached: isCached
};
return mochStreams;
}, {})
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const OC = new OffcloudClient(apiKey, options);
return OC.cloud.history()
.then(torrents => torrents)
.then(torrents => (torrents || [])
.map(torrent => ({
id: `${KEY}:${torrent.requestId}`,
type: Type.OTHER,
name: torrent.fileName
})));
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions(ip);
const OC = new OffcloudClient(apiKey, options);
const torrents = await OC.cloud.history();
const torrent = torrents.find(torrent => torrent.requestId === itemId)
const infoHash = torrent && magnet.decode(torrent.originalLink).infoHash
const createDate = torrent ? new Date(torrent.createdOn) : new Date();
return _getFileUrls(OC, torrent)
.then(files => ({
id: `${KEY}:${itemId}`,
type: Type.OTHER,
name: torrent.name,
infoHash: infoHash,
videos: files
.filter(file => isVideo(file))
.map((file, index) => ({
id: `${KEY}:${itemId}:${index}`,
title: file.split('/').pop(),
released: new Date(createDate.getTime() - index).toISOString(),
streams: [{ url: file }]
}))
}))
}
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting Offcloud ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const OC = new OffcloudClient(apiKey, options);
return _resolve(OC, infoHash, cachedEntryInfo, fileIndex)
.catch(error => {
if (errorExpiredSubscriptionError(error)) {
console.log(`Access denied to Offcloud ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(OC, infoHash, cachedEntryInfo, fileIndex) {
const torrent = await _createOrFindTorrent(OC, infoHash)
.then(info => info.requestId ? OC.cloud.status(info.requestId) : Promise.resolve(info))
.then(info => info.status || info);
if (torrent && statusReady(torrent)) {
return _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex);
} else if (torrent && statusDownloading(torrent)) {
console.log(`Downloading to Offcloud ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusError(torrent)) {
console.log(`Retry failed download in Offcloud ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed Offcloud adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrent(OC, infoHash) {
return _findTorrent(OC, infoHash)
.catch(() => _createTorrent(OC, infoHash));
}
async function _findTorrent(OC, infoHash) {
const torrents = await OC.cloud.history();
const foundTorrents = torrents.filter(torrent => torrent.originalLink.toLowerCase().includes(infoHash));
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
return foundTorrent || Promise.reject('No recent torrent found');
}
async function _createTorrent(OC, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
return OC.cloud.download(magnetLink)
}
async function _retryCreateTorrent(OC, infoHash, cachedEntryInfo, fileIndex) {
const newTorrent = await _createTorrent(OC, infoHash);
return newTorrent && statusReady(newTorrent.status)
? _unrestrictLink(OC, infoHash, newTorrent, cachedEntryInfo, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _unrestrictLink(OC, infoHash, torrent, cachedEntryInfo, fileIndex) {
const targetFileName = decodeURIComponent(cachedEntryInfo);
const files = await _getFileUrls(OC, torrent)
const targetFile = files.find(file => file.includes(`/${torrent.requestId}/${fileIndex}/`))
|| files.find(file => sameFilename(targetFileName, file.split('/').pop()))
|| files.find(file => isVideo(file))
|| files.pop();
if (!targetFile) {
return Promise.reject(`No Offcloud links found for index ${fileIndex} in: ${JSON.stringify(torrent)}`);
}
console.log(`Unrestricted Offcloud ${infoHash} [${fileIndex}] to ${targetFile}`);
return targetFile;
}
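// Illustrative priority of the selection above (URLs are hypothetical): a
// link whose path embeds `/${requestId}/${fileIndex}/` wins, then an exact
// filename match via sameFilename, then any video, then the last listed file:
//   const files = [
//     'https://s1.offcloud.com/cloud/download/req1/2/Movie.mkv',
//     'https://s1.offcloud.com/cloud/download/req1/Movie.mkv'
//   ]; // with requestId 'req1' and fileIndex 2 the first link is chosen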
async function _getFileUrls(OC, torrent) {
return OC.cloud.explore(torrent.requestId)
.catch(error => {
if (error === 'Bad archive') {
return [`https://${torrent.server}.offcloud.com/cloud/download/${torrent.requestId}/${torrent.fileName}`];
}
throw error;
})
}
async function getDefaultOptions(ip) {
return { ip, timeout: 10000 };
}
export function toCommonError(error) {
if (error?.error === 'NOAUTH' || error?.message?.startsWith('Cannot read property')) {
return BadTokenError;
}
return undefined;
}
function statusDownloading(torrent) {
return ['downloading', 'created'].includes(torrent.status);
}
function statusError(torrent) {
return ['error', 'canceled'].includes(torrent.status);
}
function statusReady(torrent) {
return torrent.status === 'downloaded';
}
function errorExpiredSubscriptionError(error) {
return error?.includes && (error.includes('not_available') || error.includes('NOAUTH') || error.includes('premium membership'));
}

View File

@@ -9,187 +9,187 @@ import StaticResponse from './static.js';
const KEY = 'premiumize';
export async function getCachedStreams(streams, apiKey) {
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
return Promise.all(chunkArray(streams, 100)
.map(chunkedStreams => _getCachedStreams(PM, apiKey, chunkedStreams)))
.then(results => results.reduce((all, result) => Object.assign(all, result), {}));
}
async function _getCachedStreams(PM, apiKey, streams) {
const hashes = streams.map(stream => stream.infoHash);
return PM.cache.check(hashes)
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
console.warn('Failed Premiumize cached torrent availability request:', error);
return undefined;
})
.then(available => streams
.reduce((mochStreams, stream, index) => {
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: available?.response[index]
};
return mochStreams;
}, {}));
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
return PM.folder.list()
.then(response => response.content)
.then(torrents => (torrents || [])
.filter(torrent => torrent && torrent.type === 'folder')
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.name
})));
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
const rootFolder = await PM.folder.list(itemId, null);
const infoHash = await _findInfoHash(PM, itemId);
return getFolderContents(PM, itemId, ip)
.then(contents => ({
id: `${KEY}:${itemId}`,
type: Type.OTHER,
name: rootFolder.name,
infoHash: infoHash,
videos: contents
.map((file, index) => ({
id: `${KEY}:${file.id}:${index}`,
title: file.name,
released: new Date(file.created_at * 1000 - index).toISOString(),
streams: [{ url: file.link || file.stream_link }]
}))
}))
}
async function getFolderContents(PM, itemId, ip, folderPrefix = '') {
return PM.folder.list(itemId, null, ip)
.then(response => response.content)
.then(contents => Promise.all(contents
.filter(content => content.type === 'folder')
.map(content => getFolderContents(PM, content.id, ip, [folderPrefix, content.name].join('/'))))
.then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
.then(otherContents => contents
.filter(content => content.type === 'file' && isVideo(content.name))
.map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
.concat(otherContents)));
}
export async function resolve({ ip, isBrowser, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting Premiumize ${infoHash} [${fileIndex}] for IP ${ip} from browser=${isBrowser}`);
const options = await getDefaultOptions();
const PM = new PremiumizeClient(apiKey, options);
return _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser)
.catch(() => _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser))
.catch(error => {
if (error?.message?.includes('Account not premium.')) {
console.log(`Access denied to Premiumize ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolve(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser) {
const torrent = await _createOrFindTorrent(PM, infoHash);
if (torrent && statusReady(torrent.status)) {
return _getCachedLink(PM, infoHash, cachedEntryInfo, fileIndex, ip, isBrowser);
} else if (torrent && statusDownloading(torrent.status)) {
console.log(`Downloading to Premiumize ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusError(torrent.status)) {
console.log(`Retrying downloading to Premiumize ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(PM, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject(`Failed Premiumize adding torrent ${JSON.stringify(torrent)}`);
}
async function _getCachedLink(PM, infoHash, encodedFileName, fileIndex, ip, isBrowser) {
const cachedTorrent = await PM.transfer.directDownload(magnet.encode({ infoHash }), ip);
if (cachedTorrent?.content?.length) {
const targetFileName = decodeURIComponent(encodedFileName);
const videos = cachedTorrent.content.filter(file => isVideo(file.path)).sort((a, b) => b.size - a.size);
const targetVideo = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(video.path, targetFileName))
: videos[0];
if (!targetVideo && videos.every(video => isArchive(video.path))) {
console.log(`Only Premiumize archive is available for [${infoHash}] ${fileIndex}`)
return StaticResponse.FAILED_RAR;
}
const streamLink = isBrowser && targetVideo.transcode_status === 'finished' && targetVideo.stream_link;
const unrestrictedLink = streamLink || targetVideo.link;
console.log(`Unrestricted Premiumize ${infoHash} [${fileIndex}] to ${unrestrictedLink}`);
return unrestrictedLink;
}
return Promise.reject('No cached entry found');
}
async function _createOrFindTorrent(PM, infoHash) {
return _findTorrent(PM, infoHash)
.catch(() => _createTorrent(PM, infoHash));
}
async function _findTorrent(PM, infoHash) {
const torrents = await PM.transfer.list().then(response => response.transfers);
const foundTorrents = torrents.filter(torrent => torrent.src.toLowerCase().includes(infoHash));
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.statusCode));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
return foundTorrent || Promise.reject('No recent torrent found');
}
async function _findInfoHash(PM, itemId) {
const torrents = await PM.transfer.list().then(response => response.transfers);
const foundTorrent = torrents.find(torrent => `${torrent.file_id}` === itemId || `${torrent.folder_id}` === itemId);
return foundTorrent?.src ? magnet.decode(foundTorrent.src).infoHash : undefined;
}
async function _createTorrent(PM, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
return PM.transfer.create(magnetLink).then(() => _findTorrent(PM, infoHash));
}
async function _retryCreateTorrent(PM, infoHash, encodedFileName, fileIndex) {
const newTorrent = await _createTorrent(PM, infoHash).then(() => _findTorrent(PM, infoHash));
return newTorrent && statusReady(newTorrent.status)
? _getCachedLink(PM, infoHash, encodedFileName, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
export function toCommonError(error) {
if (error && error.message === 'Not logged in.') {
return BadTokenError;
}
return undefined;
}
function statusError(status) {
return ['deleted', 'error', 'timeout'].includes(status);
}
function statusDownloading(status) {
return ['waiting', 'queued', 'running'].includes(status);
}
function statusReady(status) {
return ['finished', 'seeding'].includes(status);
}
async function getDefaultOptions(ip) {
return { timeout: 5000 };
}

View File

@@ -11,205 +11,205 @@ const PutioAPI = PutioClient.default;
const KEY = 'putio';
export async function getCachedStreams(streams, apiKey) {
return streams
.reduce((mochStreams, stream) => {
const streamTitleParts = stream.title.replace(/\n👤.*/s, '').split('\n');
const fileName = streamTitleParts[streamTitleParts.length - 1];
const fileIndex = streamTitleParts.length === 2 ? stream.fileIdx : null;
const encodedFileName = encodeURIComponent(fileName);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/${encodedFileName}/${fileIndex}`,
cached: false
};
return mochStreams;
}, {});
}
export async function getCatalog(apiKey, offset = 0) {
if (offset > 0) {
return [];
}
const Putio = createPutioAPI(apiKey)
return Putio.Files.Query(0)
.then(response => response?.body?.files)
.then(files => (files || [])
.map(file => ({
id: `${KEY}:${file.id}`,
type: Type.OTHER,
name: file.name
})));
}
export async function getItemMeta(itemId, apiKey) {
const Putio = createPutioAPI(apiKey)
const infoHash = await _findInfoHash(Putio, itemId)
return getFolderContents(Putio, itemId)
.then(contents => ({
id: `${KEY}:${itemId}`,
type: Type.OTHER,
name: contents.name,
infoHash: infoHash,
videos: contents
.map((file, index) => ({
id: `${KEY}:${file.id}:${index}`,
title: file.name,
released: new Date(file.created_at).toISOString(),
streams: [{ url: `${apiKey}/null/null/${file.id}` }]
}))
}))
}
async function getFolderContents(Putio, itemId, folderPrefix = '') {
return await Putio.Files.Query(itemId)
.then(response => response?.body)
.then(body => body?.files?.length ? body.files : [body?.parent].filter(x => x))
.then(contents => Promise.all(contents
.filter(content => content.file_type === 'FOLDER')
.map(content => getFolderContents(Putio, content.id, [folderPrefix, content.name].join('/'))))
.then(otherContents => otherContents.reduce((a, b) => a.concat(b), []))
.then(otherContents => contents
.filter(content => content.file_type === 'VIDEO')
.map(content => ({ ...content, name: [folderPrefix, content.name].join('/') }))
.concat(otherContents)));
}
export async function resolve({ ip, apiKey, infoHash, cachedEntryInfo, fileIndex }) {
console.log(`Unrestricting Putio ${infoHash} [${fileIndex}]`);
const Putio = createPutioAPI(apiKey)
return _resolve(Putio, infoHash, cachedEntryInfo, fileIndex)
.catch(error => {
if (error?.data?.status_code === 401) {
console.log(`Access denied to Putio ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
return Promise.reject(`Failed Putio adding torrent ${JSON.stringify(error.data || error)}`);
});
}
async function _resolve(Putio, infoHash, cachedEntryInfo, fileIndex) {
if (infoHash === 'null') {
return _unrestrictVideo(Putio, fileIndex);
}
const torrent = await _createOrFindTorrent(Putio, infoHash);
if (torrent && statusReady(torrent.status)) {
return _unrestrictLink(Putio, torrent, cachedEntryInfo, fileIndex);
} else if (torrent && statusDownloading(torrent.status)) {
console.log(`Downloading to Putio ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusError(torrent.status)) {
console.log(`Retrying downloading to Putio ${infoHash} [${fileIndex}]...`);
return _retryCreateTorrent(Putio, infoHash, cachedEntryInfo, fileIndex);
}
return Promise.reject("Failed Putio adding torrent");
}
async function _createOrFindTorrent(Putio, infoHash) {
return _findTorrent(Putio, infoHash)
.catch(() => _createTorrent(Putio, infoHash));
}
async function _retryCreateTorrent(Putio, infoHash, encodedFileName, fileIndex) {
const newTorrent = await _createTorrent(Putio, infoHash);
return newTorrent && statusReady(newTorrent.status)
? _unrestrictLink(Putio, newTorrent, encodedFileName, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _findTorrent(Putio, infoHash) {
const torrents = await Putio.Transfers.Query().then(response => response.data.transfers);
const foundTorrents = torrents.filter(torrent => torrent.source.toLowerCase().includes(infoHash));
const nonFailedTorrent = foundTorrents.find(torrent => !statusError(torrent.status));
const foundTorrent = nonFailedTorrent || foundTorrents[0];
if (foundTorrent && !foundTorrent.userfile_exists) {
return await Putio.Transfers.Cancel(foundTorrent.id).then(() => Promise.reject())
}
return foundTorrent || Promise.reject('No recent torrent found in Putio');
}
async function _findInfoHash(Putio, fileId) {
const torrents = await Putio.Transfers.Query().then(response => response?.data?.transfers);
const foundTorrent = torrents.find(torrent => `${torrent.file_id}` === fileId);
return foundTorrent?.source ? decode(foundTorrent.source).infoHash : undefined;
}
async function _createTorrent(Putio, infoHash) {
const magnetLink = await getMagnetLink(infoHash);
// Add the torrent, then poll putio (via _getNewTorrent below) until it has processed it, and return its status.
return Putio.Transfers.Add({ url: magnetLink })
.then(response => _getNewTorrent(Putio, response.data.transfer.id));
}
async function _getNewTorrent(Putio, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
return Putio.Transfers.Get(torrentId)
.then(response => response.data.transfer)
.then(torrent => statusProcessing(torrent.status) && pollCounter < maxPollNumber
? delay(pollRate).then(() => _getNewTorrent(Putio, torrentId, pollCounter + 1))
: torrent);
}
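// Timing note (derived from the defaults above): _getNewTorrent polls
// Transfers.Get every 2 seconds up to 15 times, so a fresh transfer has about
// 30 seconds to leave the WAITING/IN_QUEUE/COMPLETING states before its
// current status is returned unchanged.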
async function _unrestrictLink(Putio, torrent, encodedFileName, fileIndex) {
const targetVideo = await _getTargetFile(Putio, torrent, encodedFileName, fileIndex);
return _unrestrictVideo(Putio, targetVideo.id);
}
async function _unrestrictVideo(Putio, videoId) {
const response = await Putio.File.GetStorageURL(videoId);
const downloadUrl = response.data.url
console.log(`Unrestricted Putio [${videoId}] to ${downloadUrl}`);
return downloadUrl;
}
async function _getTargetFile(Putio, torrent, encodedFileName, fileIndex) {
const targetFileName = decodeURIComponent(encodedFileName);
let targetFile;
let files = await _getFiles(Putio, torrent.file_id);
let videos = [];
while (!targetFile && files.length) {
const folders = files.filter(file => file.file_type === 'FOLDER');
videos = videos.concat(files.filter(file => isVideo(file.name))).sort((a, b) => b.size - a.size);
// when specific file index is defined search by filename
// when it's not defined find all videos and take the largest one
targetFile = Number.isInteger(fileIndex)
? videos.find(video => sameFilename(targetFileName, video.name))
: !folders.length && videos[0];
files = !targetFile
? await Promise.all(folders.map(folder => _getFiles(Putio, folder.id)))
.then(results => results.reduce((a, b) => a.concat(b), []))
: [];
}
return targetFile || Promise.reject(`No target file found for Putio [${torrent.hash}] ${targetFileName}`);
}
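// Illustrative walk (hypothetical tree, not from the source): the loop above
// descends folder by folder; with an explicit fileIndex it stops at the first
// sameFilename match, otherwise it keeps descending until a batch contains no
// folders and then takes the largest video collected so far, e.g.
//   root/ -> Sample.mkv (50 MB), Movie.mkv (4 GB)  => Movie.mkv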
async function _getFiles(Putio, fileId) {
const response = await Putio.Files.Query(fileId)
.catch(error => Promise.reject({ ...error.data, path: error.request.path }));
return response.data.files.length
? response.data.files
: [response.data.parent];
}
function createPutioAPI(apiKey) {
const clientId = apiKey.replace(/@.*/, '');
const token = apiKey.replace(/.*@/, '');
const Putio = new PutioAPI({ clientID: clientId });
Putio.setToken(token);
return Putio;
}
function statusError(status) {
return ['ERROR'].includes(status);
}
function statusDownloading(status) {
return ['WAITING', 'IN_QUEUE', 'DOWNLOADING'].includes(status);
}
function statusProcessing(status) {
return ['WAITING', 'IN_QUEUE', 'COMPLETING'].includes(status);
}
function statusReady(status) {
return ['COMPLETED', 'SEEDING'].includes(status);
}

View File

@@ -15,385 +15,385 @@ const KEY = 'realdebrid';
const DEBRID_DOWNLOADS = 'Downloads';
export async function getCachedStreams(streams, apiKey) {
const hashes = streams.map(stream => stream.infoHash);
const available = await _getInstantAvailable(hashes, apiKey);
return available && streams
.reduce((mochStreams, stream) => {
const cachedEntry = available[stream.infoHash];
const cachedIds = _getCachedFileIds(stream.fileIdx, cachedEntry);
mochStreams[`${stream.infoHash}@${stream.fileIdx}`] = {
url: `${apiKey}/${stream.infoHash}/null/${stream.fileIdx}`,
cached: !!cachedIds.length
};
return mochStreams;
}, {})
}
async function _getInstantAvailable(hashes, apiKey, retries = 3, maxChunkSize = 150) {
const cachedResults = await getCachedAvailabilityResults(hashes);
const missingHashes = hashes.filter(infoHash => !cachedResults[infoHash]);
if (!missingHashes.length) {
return cachedResults
}
const options = await getDefaultOptions();
const RD = new RealDebridClient(apiKey, options);
const hashBatches = chunkArray(missingHashes, maxChunkSize)
return Promise.all(hashBatches.map(batch => RD.torrents.instantAvailability(batch)
.then(response => {
if (typeof response !== 'object') {
return Promise.reject(new Error('RD returned non JSON response: ' + response));
}
return processAvailabilityResults(response);
})))
.then(results => results.reduce((all, result) => Object.assign(all, result), {}))
.then(results => cacheAvailabilityResults(results))
.then(results => Object.assign(cachedResults, results))
.catch(error => {
if (toCommonError(error)) {
return Promise.reject(error);
}
if (!error && maxChunkSize !== 1) {
// sometimes due to large response size RD responds with an empty body. Reduce chunk size to reduce body
console.log(`Reducing chunk size for availability request: ${hashes[0]}`);
return _getInstantAvailable(hashes, apiKey, retries - 1, Math.ceil(maxChunkSize / 10));
}
if (retries > 0 && NON_BLACKLIST_ERRORS.some(v => error?.message?.includes(v))) {
return _getInstantAvailable(hashes, apiKey, retries - 1);
}
console.warn(`Failed RealDebrid cached [${hashes[0]}] torrent availability request:`, error.message);
return undefined;
});
}
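// Retry note (derived from the code above): when RD answers with an empty
// body the chunk size shrinks via Math.ceil(maxChunkSize / 10), i.e.
// 150 -> 15 -> 2 -> 1, so availability is re-requested in ever smaller
// batches before the warning is logged and undefined is returned.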
function processAvailabilityResults(availabilityResults) {
const processedResults = {};
Object.entries(availabilityResults)
.forEach(([infoHash, hosterResults]) => processedResults[infoHash] = getCachedIds(hosterResults));
return processedResults;
}
function getCachedIds(hosterResults) {
if (!hosterResults || Array.isArray(hosterResults)) {
return [];
}
// if not all cached files are videos, then the torrent will be zipped to a rar
return Object.values(hosterResults)
.reduce((a, b) => a.concat(b), [])
.filter(cached => Object.keys(cached).length && Object.values(cached).every(file => isVideo(file.filename)))
.map(cached => Object.keys(cached))
.sort((a, b) => b.length - a.length)
.filter((cached, index, array) => index === 0 || cached.some(id => !array[0].includes(id)));
}
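// As the comment above notes, a cached variant is only usable when every file
// in it is a video; otherwise RealDebrid serves the torrent as a single rar
// archive that cannot be streamed, so such variants are filtered out here.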
function _getCachedFileIds(fileIndex, cachedResults) {
if (!cachedResults || !Array.isArray(cachedResults)) {
return [];
}
const cachedIds = Number.isInteger(fileIndex)
? cachedResults.find(ids => ids.includes(`${fileIndex + 1}`))
: cachedResults[0];
return cachedIds || [];
}
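// Index note (derived from the source): RealDebrid file ids are 1-based while
// stream fileIdx is 0-based, hence the `${fileIndex + 1}` lookup here and the
// matching `${file.id - 1}` used when getItemMeta builds stream URLs.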
export async function getCatalog(apiKey, offset, ip) {
if (offset > 0) {
return [];
}
const options = await getDefaultOptions(ip);
const RD = new RealDebridClient(apiKey, options);
const downloadsMeta = {
id: `${KEY}:${DEBRID_DOWNLOADS}`,
type: Type.OTHER,
name: DEBRID_DOWNLOADS
};
const torrentMetas = await _getAllTorrents(RD)
.then(torrents => Array.isArray(torrents) ? torrents : [])
.then(torrents => torrents
.filter(torrent => torrent && statusReady(torrent.status))
.map(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename
})));
return [downloadsMeta].concat(torrentMetas)
}
export async function getItemMeta(itemId, apiKey, ip) {
const options = await getDefaultOptions(ip);
const RD = new RealDebridClient(apiKey, options);
if (itemId === DEBRID_DOWNLOADS) {
const videos = await _getAllDownloads(RD)
.then(downloads => downloads
.map(download => ({
id: `${KEY}:${DEBRID_DOWNLOADS}:${download.id}`,
// infoHash: allTorrents
// .filter(torrent => (torrent.links || []).find(link => link === download.link))
// .map(torrent => torrent.hash.toLowerCase())[0],
title: download.filename,
released: new Date(download.generated).toISOString(),
streams: [{ url: download.download }]
})));
return {
id: `${KEY}:${DEBRID_DOWNLOADS}`,
type: Type.OTHER,
name: DEBRID_DOWNLOADS,
videos: videos
};
}
return _getTorrentInfo(RD, itemId)
.then(torrent => ({
id: `${KEY}:${torrent.id}`,
type: Type.OTHER,
name: torrent.filename,
infoHash: torrent.hash.toLowerCase(),
videos: torrent.files
.filter(file => file.selected)
.filter(file => isVideo(file.path))
.map((file, index) => ({
id: `${KEY}:${torrent.id}:${file.id}`,
title: file.path,
released: new Date(new Date(torrent.added).getTime() - index).toISOString(),
streams: [{ url: `${apiKey}/${torrent.hash.toLowerCase()}/null/${file.id - 1}` }]
}))
}))
}
async function _getAllTorrents(RD, page = 1) {
return RD.torrents.get(page - 1, page, CATALOG_PAGE_SIZE)
.then(torrents => torrents && torrents.length === CATALOG_PAGE_SIZE && page < CATALOG_MAX_PAGE
? _getAllTorrents(RD, page + 1)
.then(nextTorrents => torrents.concat(nextTorrents))
.catch(() => torrents)
: torrents)
}
async function _getAllDownloads(RD, page = 1) {
return RD.downloads.get(page - 1, page, CATALOG_PAGE_SIZE);
}
export async function resolve({ ip, isBrowser, apiKey, infoHash, fileIndex }) {
console.log(`Unrestricting RealDebrid ${infoHash} [${fileIndex}]`);
const options = await getDefaultOptions(ip);
const RD = new RealDebridClient(apiKey, options);
const cachedFileIds = await _resolveCachedFileIds(infoHash, fileIndex, apiKey);
return _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser)
.catch(error => {
if (accessDeniedError(error)) {
console.log(`Access denied to RealDebrid ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_ACCESS;
}
if (infringingFile(error)) {
console.log(`Infringing file removed from RealDebrid ${infoHash} [${fileIndex}]`);
return StaticResponse.FAILED_INFRINGEMENT;
}
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(error)}`);
});
}
async function _resolveCachedFileIds(infoHash, fileIndex, apiKey) {
const available = await _getInstantAvailable([infoHash], apiKey);
const cachedEntry = available?.[infoHash];
const cachedIds = _getCachedFileIds(fileIndex, cachedEntry);
return cachedIds?.join(',');
}
async function _resolve(RD, infoHash, cachedFileIds, fileIndex, isBrowser) {
const torrentId = await _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex);
const torrent = await _getTorrentInfo(RD, torrentId);
if (torrent && statusReady(torrent.status)) {
return _unrestrictLink(RD, torrent, fileIndex, isBrowser);
} else if (torrent && statusDownloading(torrent.status)) {
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING;
} else if (torrent && statusMagnetError(torrent.status)) {
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}] due to magnet error`);
return StaticResponse.FAILED_OPENING;
} else if (torrent && statusError(torrent.status)) {
return _retryCreateTorrent(RD, infoHash, fileIndex);
} else if (torrent && (statusWaitingSelection(torrent.status) || statusOpening(torrent.status))) {
console.log(`Trying to select files on RealDebrid ${infoHash} [${fileIndex}]...`);
return _selectTorrentFiles(RD, torrent)
.then(() => {
console.log(`Downloading to RealDebrid ${infoHash} [${fileIndex}]...`);
return StaticResponse.DOWNLOADING
})
.catch(error => {
console.log(`Failed RealDebrid opening torrent ${infoHash} [${fileIndex}]:`, error);
return StaticResponse.FAILED_OPENING;
});
}
return Promise.reject(`Failed RealDebrid adding torrent ${JSON.stringify(torrent)}`);
}
async function _createOrFindTorrentId(RD, infoHash, cachedFileIds, fileIndex) {
return _findTorrent(RD, infoHash, fileIndex)
.catch(() => _createTorrentId(RD, infoHash, cachedFileIds));
}
async function _findTorrent(RD, infoHash, fileIndex) {
const torrents = await RD.torrents.get(0, 1) || [];
const foundTorrents = torrents
.filter(torrent => torrent.hash.toLowerCase() === infoHash)
.filter(torrent => !statusError(torrent.status));
const foundTorrent = await _findBestFitTorrent(RD, foundTorrents, fileIndex);
return foundTorrent?.id || Promise.reject('No recent torrent found');
}
async function _findBestFitTorrent(RD, torrents, fileIndex) {
if (torrents.length === 1) {
return torrents[0];
}
const torrentInfos = await Promise.all(torrents.map(torrent => _getTorrentInfo(RD, torrent.id)));
const bestFitTorrents = torrentInfos
.filter(torrent => torrent.files.find(f => f.id === fileIndex + 1 && f.selected))
.sort((a, b) => b.links.length - a.links.length);
return bestFitTorrents[0] || torrents[0];
}
async function _getTorrentInfo(RD, torrentId) {
if (!torrentId || typeof torrentId === 'object') {
return torrentId || Promise.reject('No RealDebrid torrentId provided')
}
return RD.torrents.info(torrentId);
}
async function _createTorrentId(RD, infoHash, cachedFileIds) {
const magnetLink = await getMagnetLink(infoHash);
const addedMagnet = await RD.torrents.addMagnet(magnetLink);
if (cachedFileIds && !['null', 'undefined'].includes(cachedFileIds)) {
await RD.torrents.selectFiles(addedMagnet.id, cachedFileIds);
}
return addedMagnet.id;
}
async function _recreateTorrentId(RD, infoHash, fileIndex) {
const newTorrentId = await _createTorrentId(RD, infoHash);
await _selectTorrentFiles(RD, { id: newTorrentId }, fileIndex);
return newTorrentId;
}
async function _retryCreateTorrent(RD, infoHash, fileIndex) {
console.log(`Retry failed download in RealDebrid ${infoHash} [${fileIndex}]...`);
const newTorrentId = await _recreateTorrentId(RD, infoHash, fileIndex);
const newTorrent = await _getTorrentInfo(RD, newTorrentId);
return newTorrent && statusReady(newTorrent.status)
? _unrestrictLink(RD, newTorrent, fileIndex)
: StaticResponse.FAILED_DOWNLOAD;
}
async function _selectTorrentFiles(RD, torrent, fileIndex) {
torrent = statusWaitingSelection(torrent.status) ? torrent : await _openTorrent(RD, torrent.id);
if (torrent?.files && statusWaitingSelection(torrent.status)) {
const videoFileIds = Number.isInteger(fileIndex) ? `${fileIndex + 1}` : torrent.files
.filter(file => isVideo(file.path))
.filter(file => file.bytes > MIN_SIZE)
.map(file => file.id)
.join(',');
return RD.torrents.selectFiles(torrent.id, videoFileIds);
}
return Promise.reject('Failed RealDebrid torrent file selection')
}
async function _openTorrent(RD, torrentId, pollCounter = 0, pollRate = 2000, maxPollNumber = 15) {
return _getTorrentInfo(RD, torrentId)
.then(torrent => torrent && statusOpening(torrent.status) && pollCounter < maxPollNumber
? delay(pollRate).then(() => _openTorrent(RD, torrentId, pollCounter + 1))
: torrent);
}
async function _unrestrictLink(RD, torrent, fileIndex, isBrowser) {
const selectedFiles = torrent.files.filter(file => file.selected);
const fileLink = torrent.links.length === 1
? torrent.links[0]
: torrent.links[selectedFiles.indexOf(targetFile)];
if (!fileLink?.length) {
console.log(`No RealDebrid links found for ${torrent.hash} [${fileIndex}]`);
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
}
return _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser);
}
async function _unrestrictFileLink(RD, fileLink, torrent, fileIndex, isBrowser) {
return RD.unrestrict.link(fileLink)
.then(response => {
if (isArchive(response.download)) {
if (torrent.files.filter(file => file.selected).length > 1) {
return _retryCreateTorrent(RD, torrent.hash, fileIndex)
}
return StaticResponse.FAILED_RAR;
}
// if (isBrowser && response.streamable) {
// return RD.streaming.transcode(response.id)
// .then(streamResponse => streamResponse.apple.full)
// }
return response.download;
})
.then(unrestrictedLink => {
console.log(`Unrestricted RealDebrid ${torrent.hash} [${fileIndex}] to ${unrestrictedLink}`);
return unrestrictedLink;
})
.catch(error => {
if (error.code === 19) {
return _retryCreateTorrent(RD, torrent.hash.toLowerCase(), fileIndex);
}
return Promise.reject(error);
});
}
export function toCommonError(error) {
if (error && error.code === 8) {
return BadTokenError;
}
if (error && accessDeniedError(error)) {
return AccessDeniedError;
}
return undefined;
}
function statusError(status) {
return ['error', 'magnet_error'].includes(status);
}
function statusMagnetError(status) {
return status === 'magnet_error';
}
function statusOpening(status) {
return status === 'magnet_conversion';
}
function statusWaitingSelection(status) {
return status === 'waiting_files_selection';
}
function statusDownloading(status) {
return ['downloading', 'uploading', 'queued'].includes(status);
}
function statusReady(status) {
return ['downloaded', 'dead'].includes(status);
}
function accessDeniedError(error) {
return [9, 20].includes(error?.code);
}
function infringingFile(error) {
return error && error.code === 35;
}
async function getDefaultOptions(ip) {
return { ip, timeout: 10000 };
return { ip, timeout: 15000 };
}

View File

@@ -15,7 +15,7 @@ WORKDIR /app
ENV PYTHONUNBUFFERED=1
RUN apk add --update --no-cache python3=~3.11 py3-pip && ln -sf python3 /usr/bin/python
RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .

View File

@@ -16,14 +16,13 @@ public static class DebridMetaToTorrentMeta
foreach (var metadataEntry in Metadata.Where(m => Filetypes.VideoFileExtensions.Any(ext => m.Value.Filename.EndsWith(ext))))
{
var validFileIndex = int.TryParse(metadataEntry.Key, out var fileIndex);
var fileIndexMinusOne = Math.Max(0, fileIndex - 1);
var file = new TorrentFile
{
ImdbId = ImdbId,
KitsuId = 0,
InfoHash = torrent.InfoHash,
FileIndex = validFileIndex ? fileIndexMinusOne : 0,
FileIndex = validFileIndex ? fileIndex : 0,
Title = metadataEntry.Value.Filename,
Size = metadataEntry.Value.Filesize.GetValueOrDefault(),
};
@@ -67,14 +66,13 @@ public static class DebridMetaToTorrentMeta
foreach (var metadataEntry in Metadata.Where(m => Filetypes.SubtitleFileExtensions.Any(ext => m.Value.Filename.EndsWith(ext))))
{
var validFileIndex = int.TryParse(metadataEntry.Key, out var fileIndex);
var fileIndexMinusOne = Math.Max(0, fileIndex - 1);
var fileId = torrentFiles.FirstOrDefault(
t => Path.GetFileNameWithoutExtension(t.Title) == Path.GetFileNameWithoutExtension(metadataEntry.Value.Filename))?.Id ?? 0;
var file = new SubtitleFile
{
InfoHash = InfoHash,
FileIndex = validFileIndex ? fileIndexMinusOne : 0,
FileIndex = validFileIndex ? fileIndex : 0,
FileId = fileId,
Title = metadataEntry.Value.Filename,
};

View File

@@ -1 +1 @@
rank-torrent-name==0.2.13
rank-torrent-name==0.2.5

View File

@@ -8,14 +8,12 @@ public class PostgresConfiguration
private const string PasswordVariable = "PASSWORD";
private const string DatabaseVariable = "DB";
private const string PortVariable = "PORT";
private const string CommandTimeoutVariable = "COMMAND_TIMEOUT_SEC"; // Seconds
private string Host { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(HostVariable);
private string Username { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(UsernameVariable);
private string Password { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(PasswordVariable);
private string Database { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(DatabaseVariable);
private int PORT { get; init; } = Prefix.GetEnvironmentVariableAsInt(PortVariable, 5432);
private int CommandTimeout { get; init; } = Prefix.GetEnvironmentVariableAsInt(CommandTimeoutVariable, 300);
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};CommandTimeout={CommandTimeout}";
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};";
}

View File

@@ -134,7 +134,7 @@ public class ImdbDbService(PostgresConfiguration configuration, ILogger<ImdbDbSe
{
try
{
await using var connection = new NpgsqlConnection(configuration.StorageConnectionString);
await using var connection = CreateNpgsqlConnection();
await connection.OpenAsync();
await operation(connection);
@@ -145,6 +145,16 @@ public class ImdbDbService(PostgresConfiguration configuration, ILogger<ImdbDbSe
}
}
private NpgsqlConnection CreateNpgsqlConnection()
{
var connectionStringBuilder = new NpgsqlConnectionStringBuilder(configuration.StorageConnectionString)
{
CommandTimeout = 3000,
};
return new(connectionStringBuilder.ConnectionString);
}
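
Note: the new CreateNpgsqlConnection helper pins CommandTimeout (in seconds) through NpgsqlConnectionStringBuilder, replacing the COMMAND_TIMEOUT_SEC environment variable removed from PostgresConfiguration elsewhere in this commit. For comparison, a minimal sketch of the per-query alternative Dapper already supports; the SQL text here is a placeholder, not a query from this repository:

// Hedged sketch, assuming Dapper and Npgsql usings: the timeout can also be
// applied per command instead of per connection.
await using var connection = new NpgsqlConnection(configuration.StorageConnectionString);
await connection.OpenAsync();
await connection.ExecuteAsync(
    "SELECT pg_sleep(1);",     // placeholder statement
    commandTimeout: 3000);     // seconds, mirroring the builder override above
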
private async Task ExecuteCommandWithTransactionAsync(Func<NpgsqlConnection, NpgsqlTransaction, Task> operation, NpgsqlTransaction transaction, string errorMessage)
{
try

View File

@@ -13,7 +13,7 @@
<PackageReference Include="Dapper" Version="2.1.35" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Npgsql" Version="8.0.3" />
<PackageReference Include="Npgsql" Version="8.0.2" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />

View File

@@ -1,43 +0,0 @@
-- Drop Duplicate Files in Files Table
DELETE FROM public.files
WHERE id NOT IN (
SELECT MAX(id)
FROM public.files
GROUP BY "infoHash", "fileIndex"
);
-- Add Index to files table
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_constraint
WHERE conname = 'files_unique_infohash_fileindex'
) THEN
ALTER TABLE public.files
ADD CONSTRAINT files_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
END IF;
END $$;
-- Drop Duplicate subtitles in Subtitles Table
DELETE FROM public.subtitles
WHERE id NOT IN (
SELECT MAX(id)
FROM public.subtitles
GROUP BY "infoHash", "fileIndex"
);
-- Add Index to subtitles table
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_constraint
WHERE conname = 'subtitles_unique_infohash_fileindex'
) THEN
ALTER TABLE public.subtitles
ADD CONSTRAINT subtitles_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
END IF;
END $$;

View File

@@ -4,38 +4,31 @@
{
"Name": "SyncEzTvJob",
"IntervalSeconds": 60,
"Enabled": true,
"Url": "https://eztvx.to/ezrss.xml",
"XmlNamespace": "http://xmlns.ezrss.it/0.1/"
"Enabled": true
},
{
"Name": "SyncNyaaJob",
"IntervalSeconds": 60,
"Enabled": true,
"Url": "https://nyaa.si/?page=rss&c=1_2&f=0",
"XmlNamespace": "https://nyaa.si/xmlns/nyaa"
"Enabled": true
},
{
"Name": "SyncTpbJob",
"IntervalSeconds": 60,
"Enabled": true,
"Url": "https://apibay.org/precompiled/data_top100_recent.json"
"Enabled": true
},
{
"Name": "SyncYtsJob",
"IntervalSeconds": 60,
"Enabled": true,
"Url": "https://yts.am/rss"
"Enabled": true
},
{
"Name": "SyncTgxJob",
"IntervalSeconds": 60,
"Enabled": true,
"Url": "https://tgx.rs/rss"
"Enabled": true
},
{
"Name": "SyncDmmJob",
"IntervalSeconds": 10800,
"IntervalSeconds": 1800,
"Enabled": true
},
{

View File

@@ -14,7 +14,7 @@ WORKDIR /app
ENV PYTHONUNBUFFERED=1
RUN apk add --update --no-cache python3=~3.11 py3-pip && ln -sf python3 /usr/bin/python
RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .

View File

@@ -6,12 +6,6 @@ public abstract class BaseJsonCrawler(IHttpClientFactory httpClientFactory, ILog
protected virtual async Task Execute(string collectionName)
{
if (string.IsNullOrWhiteSpace(Url))
{
logger.LogWarning("No URL provided for {Source} crawl", Source);
return;
}
logger.LogInformation("Starting {Source} crawl", Source);
using var client = httpClientFactory.CreateClient("Scraper");

View File

@@ -4,12 +4,6 @@ public abstract class BaseXmlCrawler(IHttpClientFactory httpClientFactory, ILogg
{
public override async Task Execute()
{
if (string.IsNullOrWhiteSpace(Url))
{
logger.LogWarning("No URL provided for {Source} crawl", Source);
return;
}
logger.LogInformation("Starting {Source} crawl", Source);
using var client = httpClientFactory.CreateClient(Literals.CrawlerClient);

View File

@@ -7,8 +7,4 @@ public class Scraper
public int IntervalSeconds { get; set; } = 60;
public bool Enabled { get; set; } = true;
public string? Url { get; set; }
public string? XmlNamespace { get; set; }
}

View File

@@ -1,70 +0,0 @@
namespace Producer.Features.Crawlers.Dmm;
public class DMMFileDownloader(HttpClient client, ILogger<DMMFileDownloader> logger) : IDMMFileDownloader
{
private const string Filename = "main.zip";
private readonly IReadOnlyCollection<string> _filesToIgnore = [
"index.html",
"404.html",
"dedupe.sh",
"CNAME",
];
public const string ClientName = "DmmFileDownloader";
public async Task<string> DownloadFileToTempPath(CancellationToken cancellationToken)
{
logger.LogInformation("Downloading DMM Hashlists");
var response = await client.GetAsync(Filename, cancellationToken);
var tempDirectory = Path.Combine(Path.GetTempPath(), "DMMHashlists");
EnsureDirectoryIsClean(tempDirectory);
response.EnsureSuccessStatusCode();
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
using var archive = new ZipArchive(stream);
logger.LogInformation("Extracting DMM Hashlists to {TempDirectory}", tempDirectory);
foreach (var entry in archive.Entries)
{
var entryPath = Path.Combine(tempDirectory, Path.GetFileName(entry.FullName));
if (!entry.FullName.EndsWith('/')) // It's a file
{
entry.ExtractToFile(entryPath, true);
}
}
foreach (var file in _filesToIgnore)
{
CleanRepoExtras(tempDirectory, file);
}
logger.LogInformation("Downloaded and extracted Repository to {TempDirectory}", tempDirectory);
return tempDirectory;
}
private static void CleanRepoExtras(string tempDirectory, string fileName)
{
var repoIndex = Path.Combine(tempDirectory, fileName);
if (File.Exists(repoIndex))
{
File.Delete(repoIndex);
}
}
private static void EnsureDirectoryIsClean(string tempDirectory)
{
if (Directory.Exists(tempDirectory))
{
Directory.Delete(tempDirectory, true);
}
Directory.CreateDirectory(tempDirectory);
}
}

View File

@@ -1,6 +0,0 @@
namespace Producer.Features.Crawlers.Dmm;
public class DMMHttpClient
{
}

View File

@@ -1,99 +1,64 @@
namespace Producer.Features.Crawlers.Dmm;
public partial class DebridMediaManagerCrawler(
IDMMFileDownloader dmmFileDownloader,
IHttpClientFactory httpClientFactory,
ILogger<DebridMediaManagerCrawler> logger,
IDataStorage storage,
GithubConfiguration githubConfiguration,
IRankTorrentName rankTorrentName,
IDistributedCache cache) : BaseCrawler(logger, storage)
{
[GeneratedRegex("""<iframe src="https:\/\/debridmediamanager.com\/hashlist#(.*)"></iframe>""")]
private static partial Regex HashCollectionMatcher();
protected override string Url => "";
private const string DownloadBaseUrl = "https://raw.githubusercontent.com/debridmediamanager/hashlists/main";
protected override IReadOnlyDictionary<string, string> Mappings => new Dictionary<string, string>();
protected override string Url => "https://api.github.com/repos/debridmediamanager/hashlists/git/trees/main?recursive=1";
protected override string Source => "DMM";
private const int ParallelismCount = 4;
public override async Task Execute()
{
var tempDirectory = await dmmFileDownloader.DownloadFileToTempPath(CancellationToken.None);
var client = httpClientFactory.CreateClient("Scraper");
client.DefaultRequestHeaders.Authorization = new("Bearer", githubConfiguration.PAT);
client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
var files = Directory.GetFiles(tempDirectory, "*.html", SearchOption.AllDirectories);
var jsonBody = await client.GetStringAsync(Url);
logger.LogInformation("Found {Files} files to parse", files.Length);
var json = JsonDocument.Parse(jsonBody);
var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
var entriesArray = json.RootElement.GetProperty("tree");
await Parallel.ForEachAsync(files, options, async (file, token) =>
logger.LogInformation("Found {Entries} total DMM pages", entriesArray.GetArrayLength());
foreach (var entry in entriesArray.EnumerateArray())
{
var fileName = Path.GetFileName(file);
var torrentDictionary = await ExtractPageContents(file, fileName);
if (torrentDictionary == null)
{
return;
}
await ParseTitlesWithRtn(fileName, torrentDictionary);
var results = await ParseTorrents(torrentDictionary);
if (results.Count <= 0)
{
return;
}
await InsertTorrents(results);
await Storage.MarkPageAsIngested(fileName, token);
});
await ParsePage(entry, client);
}
}
private async Task ParseTitlesWithRtn(string fileName, IDictionary<string, DmmContent> page)
private async Task ParsePage(JsonElement entry, HttpClient client)
{
logger.LogInformation("Parsing titles for {Page}", fileName);
var (pageIngested, name) = await IsAlreadyIngested(entry);
var batchProcessables = page.Select(value => new RtnBatchProcessable(value.Key, value.Value.Filename)).ToList();
var parsedResponses = rankTorrentName.BatchParse(
batchProcessables.Select<RtnBatchProcessable, string>(bp => bp.Filename).ToList(), trashGarbage: false);
// Filter out unsuccessful responses and match RawTitle to requesting title
var successfulResponses = parsedResponses
.Where(response => response != null && response.Success)
.GroupBy(response => response.Response.RawTitle!)
.ToDictionary(group => group.Key, group => group.First());
var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
await Parallel.ForEachAsync(batchProcessables.Select(t => t.InfoHash), options, (infoHash, _) =>
if (string.IsNullOrEmpty(name) || pageIngested)
{
if (page.TryGetValue(infoHash, out var dmmContent) &&
successfulResponses.TryGetValue(dmmContent.Filename, out var parsedResponse))
{
page[infoHash] = dmmContent with { ParseResponse = parsedResponse };
}
return ValueTask.CompletedTask;
});
}
private async Task<ConcurrentDictionary<string, DmmContent>?> ExtractPageContents(string filePath, string filenameOnly)
{
var (pageIngested, name) = await IsAlreadyIngested(filenameOnly);
if (pageIngested)
{
return [];
return;
}
var pageSource = await File.ReadAllTextAsync(filePath);
var pageSource = await client.GetStringAsync($"{DownloadBaseUrl}/{name}");
await ExtractPageContents(pageSource, name);
}
private async Task ExtractPageContents(string pageSource, string name)
{
var match = HashCollectionMatcher().Match(pageSource);
if (!match.Success)
{
logger.LogWarning("Failed to match hash collection for {Name}", name);
await Storage.MarkPageAsIngested(filenameOnly);
return [];
await Storage.MarkPageAsIngested(name);
return;
}
var encodedJson = match.Groups.Values.ElementAtOrDefault(1);
@@ -101,165 +66,34 @@ public partial class DebridMediaManagerCrawler(
if (string.IsNullOrEmpty(encodedJson?.Value))
{
logger.LogWarning("Failed to extract encoded json for {Name}", name);
return [];
return;
}
var decodedJson = LZString.DecompressFromEncodedURIComponent(encodedJson.Value);
await ProcessExtractedContentsAsTorrentCollection(encodedJson.Value, name);
}
JsonElement arrayToProcess;
try
{
var json = JsonDocument.Parse(decodedJson);
private async Task ProcessExtractedContentsAsTorrentCollection(string encodedJson, string name)
{
var decodedJson = LZString.DecompressFromEncodedURIComponent(encodedJson);
if (json.RootElement.ValueKind == JsonValueKind.Object &&
json.RootElement.TryGetProperty("torrents", out var torrentsProperty) &&
torrentsProperty.ValueKind == JsonValueKind.Array)
{
arrayToProcess = torrentsProperty;
}
else if (json.RootElement.ValueKind == JsonValueKind.Array)
{
arrayToProcess = json.RootElement;
}
else
{
logger.LogWarning("Unexpected JSON format in {Name}", name);
return [];
}
}
catch (Exception ex)
var json = JsonDocument.Parse(decodedJson);
await InsertTorrentsForPage(json);
var result = await Storage.MarkPageAsIngested(name);
if (!result.IsSuccess)
{
logger.LogError("Failed to parse JSON {decodedJson} for {Name}: {Exception}", decodedJson, name, ex);
return [];
logger.LogWarning("Failed to mark page as ingested: [{Error}]", result.Failure.ErrorMessage);
return;
}
var torrents = await arrayToProcess.EnumerateArray()
.ToAsyncEnumerable()
.Select(ParsePageContent)
.Where(t => t is not null)
.ToListAsync();
if (torrents.Count == 0)
{
logger.LogWarning("No torrents found in {Name}", name);
await Storage.MarkPageAsIngested(filenameOnly);
return [];
}
var torrentDictionary = torrents
.Where(x => x is not null)
.GroupBy(x => x.InfoHash)
.ToConcurrentDictionary(g => g.Key, g => new DmmContent(g.First().Filename, g.First().Bytes, null));
logger.LogInformation("Parsed {Torrents} torrents for {Name}", torrentDictionary.Count, name);
return torrentDictionary;
logger.LogInformation("Successfully marked page as ingested");
}
private async Task<List<IngestedTorrent>> ParseTorrents(IDictionary<string, DmmContent> page)
private async Task<IngestedTorrent?> ParseTorrent(JsonElement item)
{
var ingestedTorrents = new List<IngestedTorrent>();
var options = new ParallelOptions { MaxDegreeOfParallelism = ParallelismCount };
await Parallel.ForEachAsync(page, options, async (kvp, ct) =>
{
var (infoHash, dmmContent) = kvp;
var parsedTorrent = dmmContent.ParseResponse;
if (parsedTorrent is not { Success: true })
{
return;
}
var torrentType = parsedTorrent.Response.IsMovie ? "movie" : "tvSeries";
var cacheKey = GetCacheKey(torrentType, parsedTorrent.Response.ParsedTitle, parsedTorrent.Response.Year);
var (cached, cachedResult) = await CheckIfInCacheAndReturn(cacheKey);
if (cached)
{
logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.Response.ParsedTitle);
lock (ingestedTorrents)
{
ingestedTorrents.Add(MapToTorrent(cachedResult, dmmContent.Bytes, infoHash, parsedTorrent));
}
return;
}
int? year = parsedTorrent.Response.Year != 0 ? parsedTorrent.Response.Year : null;
var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Response.ParsedTitle, torrentType, year, ct);
if (imdbEntry is null)
{
return;
}
await AddToCache(cacheKey, imdbEntry);
logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", imdbEntry.ImdbId, parsedTorrent.Response.ParsedTitle, imdbEntry.Title, imdbEntry.Score);
lock (ingestedTorrents)
{
ingestedTorrents.Add(MapToTorrent(imdbEntry, dmmContent.Bytes, infoHash, parsedTorrent));
}
});
return ingestedTorrents;
}
private IngestedTorrent MapToTorrent(ImdbEntry result, long size, string infoHash, ParseTorrentTitleResponse parsedTorrent) =>
new()
{
Source = Source,
Name = result.Title,
Imdb = result.ImdbId,
Size = size.ToString(),
InfoHash = infoHash,
Seeders = 0,
Leechers = 0,
Category = AssignCategory(result),
RtnResponse = parsedTorrent.Response.ToJson(),
};
private Task AddToCache(string cacheKey, ImdbEntry best)
{
var cacheOptions = new DistributedCacheEntryOptions
{
AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1),
};
return cache.SetStringAsync(cacheKey, JsonSerializer.Serialize(best), cacheOptions);
}
private async Task<(bool Success, ImdbEntry? Entry)> CheckIfInCacheAndReturn(string cacheKey)
{
var cachedImdbId = await cache.GetStringAsync(cacheKey);
if (!string.IsNullOrEmpty(cachedImdbId))
{
return (true, JsonSerializer.Deserialize<ImdbEntry>(cachedImdbId));
}
return (false, null);
}
private async Task<(bool Success, string? Name)> IsAlreadyIngested(string filename)
{
var pageIngested = await Storage.PageIngested(filename);
return (pageIngested, filename);
}
private static string AssignCategory(ImdbEntry entry) =>
entry.Category.ToLower() switch
{
var category when string.Equals(category, "movie", StringComparison.OrdinalIgnoreCase) => "movies",
var category when string.Equals(category, "tvSeries", StringComparison.OrdinalIgnoreCase) => "tv",
_ => "unknown",
};
private static string GetCacheKey(string category, string title, int year) => $"{category.ToLowerInvariant()}:{year}:{title.ToLowerInvariant()}";
private static ExtractedDMMContent? ParsePageContent(JsonElement item)
{
if (!item.TryGetProperty("filename", out var filenameElement) ||
!item.TryGetProperty("bytes", out var bytesElement) ||
!item.TryGetProperty("hash", out var hashElement))
@@ -267,10 +101,122 @@ public partial class DebridMediaManagerCrawler(
return null;
}
return new(filenameElement.GetString(), bytesElement.GetInt64(), hashElement.GetString());
var torrentTitle = filenameElement.GetString();
if (torrentTitle.IsNullOrEmpty())
{
return null;
}
var parsedTorrent = rankTorrentName.Parse(torrentTitle);
if (!parsedTorrent.Success)
{
return null;
}
var torrentType = parsedTorrent.Response.IsMovie ? "movie" : "tvSeries";
var cacheKey = GetCacheKey(torrentType, parsedTorrent.Response.ParsedTitle, parsedTorrent.Response.Year);
var (cached, cachedResult) = await CheckIfInCacheAndReturn(cacheKey);
if (cached)
{
logger.LogInformation("[{ImdbId}] Found cached imdb result for {Title}", cachedResult.ImdbId, parsedTorrent.Response.ParsedTitle);
return MapToTorrent(cachedResult, bytesElement, hashElement, parsedTorrent);
}
int? year = parsedTorrent.Response.Year != 0 ? parsedTorrent.Response.Year : null;
var imdbEntry = await Storage.FindImdbMetadata(parsedTorrent.Response.ParsedTitle, torrentType, year);
if (imdbEntry is null)
{
return null;
}
await AddToCache(cacheKey, imdbEntry);
logger.LogInformation("[{ImdbId}] Found best match for {Title}: {BestMatch} with score {Score}", imdbEntry.ImdbId, parsedTorrent.Response.ParsedTitle, imdbEntry.Title, imdbEntry.Score);
return MapToTorrent(imdbEntry, bytesElement, hashElement, parsedTorrent);
}
private record DmmContent(string Filename, long Bytes, ParseTorrentTitleResponse? ParseResponse);
private record ExtractedDMMContent(string Filename, long Bytes, string InfoHash);
private record RtnBatchProcessable(string InfoHash, string Filename);
private IngestedTorrent MapToTorrent(ImdbEntry result, JsonElement bytesElement, JsonElement hashElement, ParseTorrentTitleResponse parsedTorrent) =>
new()
{
Source = Source,
Name = result.Title,
Imdb = result.ImdbId,
Size = bytesElement.GetInt64().ToString(),
InfoHash = hashElement.ToString(),
Seeders = 0,
Leechers = 0,
Category = AssignCategory(result),
RtnResponse = parsedTorrent.Response.ToJson(),
};
private Task AddToCache(string cacheKey, ImdbEntry best)
{
var cacheOptions = new DistributedCacheEntryOptions
{
AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1),
};
return cache.SetStringAsync(cacheKey, JsonSerializer.Serialize(best), cacheOptions);
}
private async Task<(bool Success, ImdbEntry? Entry)> CheckIfInCacheAndReturn(string cacheKey)
{
var cachedImdbId = await cache.GetStringAsync(cacheKey);
if (!string.IsNullOrEmpty(cachedImdbId))
{
return (true, JsonSerializer.Deserialize<ImdbEntry>(cachedImdbId));
}
return (false, null);
}
private async Task InsertTorrentsForPage(JsonDocument json)
{
var torrents = await json.RootElement.EnumerateArray()
.ToAsyncEnumerable()
.SelectAwait(async x => await ParseTorrent(x))
.Where(t => t is not null)
.ToListAsync();
if (torrents.Count == 0)
{
logger.LogWarning("No torrents found in {Source} response", Source);
return;
}
await InsertTorrents(torrents!);
}
private async Task<(bool Success, string? Name)> IsAlreadyIngested(JsonElement entry)
{
var name = entry.GetProperty("path").GetString();
if (string.IsNullOrEmpty(name))
{
return (false, null);
}
var pageIngested = await Storage.PageIngested(name);
return (pageIngested, name);
}
private static string AssignCategory(ImdbEntry entry) =>
entry.Category.ToLower() switch
{
var category when string.Equals(category, "movie", StringComparison.OrdinalIgnoreCase) => "movies",
var category when string.Equals(category, "tvSeries", StringComparison.OrdinalIgnoreCase) => "tv",
_ => "unknown",
};
private static string GetCacheKey(string category, string title, int year) => $"{category.ToLowerInvariant()}:{year}:{title.ToLowerInvariant()}";
}
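
The crawler memoises IMDb matches in IDistributedCache for a day so repeated titles skip the FindImdbMetadata lookup. A minimal sketch of that round-trip, assuming Microsoft.Extensions.Caching.Distributed and a hypothetical key and title:

// Hedged sketch of the cache pattern above; the key and title are placeholders.
var cacheKey = "movie:2020:some title";             // shape produced by GetCacheKey
var cached = await cache.GetStringAsync(cacheKey);  // null on a miss
if (cached is null)
{
    var entry = await Storage.FindImdbMetadata("some title", "movie", 2020);
    await cache.SetStringAsync(cacheKey, JsonSerializer.Serialize(entry),
        new DistributedCacheEntryOptions { AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1) });
}
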

View File

@@ -0,0 +1,9 @@
namespace Producer.Features.Crawlers.Dmm;
public class GithubConfiguration
{
private const string Prefix = "GITHUB";
private const string PatVariable = "PAT";
public string? PAT { get; init; } = Prefix.GetOptionalEnvironmentVariableAsString(PatVariable);
}
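
Since PAT is optional, its presence doubles as the on/off switch for the DMM crawler (see the RegisterDmmJob change further down). A hedged sketch of the effective gate, assuming the helper resolves the variable name to GITHUB_PAT (Prefix plus PatVariable):

// Hypothetical illustration only: when GITHUB_PAT is unset, SyncDmmJob is
// never registered with Quartz, so the DMM crawl simply does not run.
var pat = Environment.GetEnvironmentVariable("GITHUB_PAT");
var dmmEnabled = !string.IsNullOrEmpty(pat);
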

View File

@@ -1,6 +0,0 @@
namespace Producer.Features.Crawlers.Dmm;
public interface IDMMFileDownloader
{
Task<string> DownloadFileToTempPath(CancellationToken cancellationToken);
}

View File

@@ -1,17 +0,0 @@
namespace Producer.Features.Crawlers.Dmm;
public static class ServiceCollectionExtensions
{
public static IServiceCollection AddDmmSupport(this IServiceCollection services)
{
services.AddHttpClient<IDMMFileDownloader, DMMFileDownloader>(DMMFileDownloader.ClientName, client =>
{
client.BaseAddress = new("https://github.com/debridmediamanager/hashlists/zipball/main/");
client.DefaultRequestHeaders.Add("Accept-Encoding", "gzip");
client.DefaultRequestHeaders.UserAgent.ParseAdd("curl");
client.Timeout = TimeSpan.FromMinutes(10); // 10 minute timeout, #217
});
return services;
}
}

View File

@@ -1,10 +1,11 @@
namespace Producer.Features.Crawlers.EzTv;
public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
public class EzTvCrawler(IHttpClientFactory httpClientFactory, ILogger<EzTvCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
{
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
protected override string Url => "https://eztv1.xyz/ezrss.xml";
protected override string Source => "EZTV";
private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncEzTvJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
private static readonly XNamespace XmlNamespace = "http://xmlns.ezrss.it/0.1/";
protected override IReadOnlyDictionary<string, string> Mappings =>
new Dictionary<string, string>

View File

@@ -1,10 +1,11 @@
namespace Producer.Features.Crawlers.Nyaa;
public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
public class NyaaCrawler(IHttpClientFactory httpClientFactory, ILogger<NyaaCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
{
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
protected override string Url => "https://nyaa.si/?page=rss&c=1_2&f=0";
protected override string Source => "Nyaa";
private XNamespace XmlNamespace => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncNyaaJob", StringComparison.OrdinalIgnoreCase))?.XmlNamespace ?? string.Empty;
private static readonly XNamespace XmlNamespace = "https://nyaa.si/xmlns/nyaa";
protected override IReadOnlyDictionary<string, string> Mappings =>
new Dictionary<string, string>

View File

@@ -1,13 +1,13 @@
namespace Producer.Features.Crawlers.Tgx;
public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger<TgxCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
public partial class TgxCrawler(IHttpClientFactory httpClientFactory, ILogger<TgxCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
{
[GeneratedRegex(@"Size:\s+(.+?)\s+Added")]
private static partial Regex SizeStringExtractor();
[GeneratedRegex(@"(?i)\b(\d+(\.\d+)?)\s*([KMGT]?B)\b", RegexOptions.None, "en-GB")]
private static partial Regex SizeStringParser();
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncTgxJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
protected override string Url => "https://tgx.rs/rss";
protected override string Source => "TorrentGalaxy";
protected override IReadOnlyDictionary<string, string> Mappings

View File

@@ -1,8 +1,8 @@
namespace Producer.Features.Crawlers.Tpb;
public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger<TpbCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseJsonCrawler(httpClientFactory, logger, storage)
public class TpbCrawler(IHttpClientFactory httpClientFactory, ILogger<TpbCrawler> logger, IDataStorage storage) : BaseJsonCrawler(httpClientFactory, logger, storage)
{
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncTpbJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
protected override string Url => "https://apibay.org/precompiled/data_top100_recent.json";
protected override string Source => "TPB";

View File

@@ -1,8 +1,9 @@
namespace Producer.Features.Crawlers.Yts;
public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger<YtsCrawler> logger, IDataStorage storage, ScrapeConfiguration scrapeConfiguration) : BaseXmlCrawler(httpClientFactory, logger, storage)
public class YtsCrawler(IHttpClientFactory httpClientFactory, ILogger<YtsCrawler> logger, IDataStorage storage) : BaseXmlCrawler(httpClientFactory, logger, storage)
{
protected override string Url => scrapeConfiguration.Scrapers.FirstOrDefault(x => x.Name.Equals("SyncYtsJob", StringComparison.OrdinalIgnoreCase))?.Url ?? string.Empty;
protected override string Url => "https://yts.am/rss";
protected override string Source => "YTS";
protected override IReadOnlyDictionary<string, string> Mappings
=> new Dictionary<string, string>

View File

@@ -5,6 +5,7 @@ internal static class ServiceCollectionExtensions
internal static IServiceCollection AddQuartz(this IServiceCollection services, IConfiguration configuration)
{
var scrapeConfiguration = services.LoadConfigurationFromConfig<ScrapeConfiguration>(configuration, ScrapeConfiguration.SectionName);
var githubConfiguration = services.LoadConfigurationFromEnv<GithubConfiguration>();
var rabbitConfiguration = services.LoadConfigurationFromEnv<RabbitMqConfiguration>();
var jobTypes = Assembly.GetAssembly(typeof(BaseJob))
@@ -18,13 +19,18 @@ internal static class ServiceCollectionExtensions
services.AddTransient(type);
}
if (!string.IsNullOrEmpty(githubConfiguration.PAT))
{
services.AddTransient<SyncDmmJob>();
}
var openMethod = typeof(ServiceCollectionExtensions).GetMethod(nameof(AddJobWithTrigger), BindingFlags.NonPublic | BindingFlags.Static | BindingFlags.Instance);
services.AddQuartz(
quartz =>
{
RegisterAutomaticRegistrationJobs(jobTypes, openMethod, quartz, scrapeConfiguration);
RegisterDmmJob(quartz, scrapeConfiguration);
RegisterDmmJob(githubConfiguration, quartz, scrapeConfiguration);
RegisterTorrentioJob(services, quartz, configuration, scrapeConfiguration);
RegisterPublisher(quartz, rabbitConfiguration);
});
@@ -58,8 +64,13 @@ internal static class ServiceCollectionExtensions
}
}
private static void RegisterDmmJob(IServiceCollectionQuartzConfigurator quartz, ScrapeConfiguration scrapeConfiguration) =>
AddJobWithTrigger<SyncDmmJob>(quartz, SyncDmmJob.Key, SyncDmmJob.Trigger, scrapeConfiguration);
private static void RegisterDmmJob(GithubConfiguration githubConfiguration, IServiceCollectionQuartzConfigurator quartz, ScrapeConfiguration scrapeConfiguration)
{
if (!string.IsNullOrEmpty(githubConfiguration.PAT))
{
AddJobWithTrigger<SyncDmmJob>(quartz, SyncDmmJob.Key, SyncDmmJob.Trigger, scrapeConfiguration);
}
}
private static void RegisterTorrentioJob(
IServiceCollection services,

View File

@@ -1,12 +1,12 @@
// Global using directives
global using System.Collections.Concurrent;
global using System.IO.Compression;
global using System.Reflection;
global using System.Text;
global using System.Text.Json;
global using System.Text.RegularExpressions;
global using System.Xml.Linq;
global using FuzzySharp;
global using FuzzySharp.Extractor;
global using FuzzySharp.PreProcess;
global using FuzzySharp.SimilarityRatio.Scorer;
global using FuzzySharp.SimilarityRatio.Scorer.StrategySensitive;

View File

@@ -12,8 +12,7 @@ builder.Services
.RegisterMassTransit()
.AddDataStorage()
.AddCrawlers()
.AddDmmSupport()
.AddQuartz(builder.Configuration);
var app = builder.Build();
app.Run();

View File

@@ -1 +1 @@
rank-torrent-name==0.2.13
rank-torrent-name==0.2.5

View File

@@ -15,7 +15,7 @@ WORKDIR /app
ENV PYTHONUNBUFFERED=1
RUN apk add --update --no-cache python3=~3.11 py3-pip && ln -sf python3 /usr/bin/python
RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .

View File

@@ -44,7 +44,6 @@ public static class ServiceCollectionExtensions
{
var rabbitConfiguration = services.LoadConfigurationFromEnv<RabbitMqConfiguration>();
var redisConfiguration = services.LoadConfigurationFromEnv<RedisConfiguration>();
var qbitConfiguration = services.LoadConfigurationFromEnv<QbitConfiguration>();
services.AddStackExchangeRedisCache(
option =>
@@ -81,8 +80,8 @@ public static class ServiceCollectionExtensions
e.ConfigureConsumer<WriteQbitMetadataConsumer>(context);
e.ConfigureConsumer<PerformQbitMetadataRequestConsumer>(context);
e.ConfigureSaga<QbitMetadataSagaState>(context);
e.ConcurrentMessageLimit = qbitConfiguration.Concurrency;
e.PrefetchCount = qbitConfiguration.Concurrency;
e.ConcurrentMessageLimit = 5;
e.PrefetchCount = 5;
});
});
});
@@ -99,7 +98,7 @@ public static class ServiceCollectionExtensions
cfg.UseTimeout(
timeout =>
{
timeout.Timeout = TimeSpan.FromMinutes(3);
timeout.Timeout = TimeSpan.FromMinutes(1);
});
})
.RedisRepository(redisConfiguration.ConnectionString, options =>
@@ -111,7 +110,7 @@ public static class ServiceCollectionExtensions
{
var qbitConfiguration = services.LoadConfigurationFromEnv<QbitConfiguration>();
var client = new QBittorrentClient(new(qbitConfiguration.Host));
client.Timeout = TimeSpan.FromSeconds(20);
client.Timeout = TimeSpan.FromSeconds(10);
services.AddSingleton<IQBittorrentClient>(client);
}

View File

@@ -1,6 +1,6 @@
namespace QBitCollector.Features.Qbit;
public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService trackersService, ILogger<QbitRequestProcessor> logger, QbitConfiguration configuration)
public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService trackersService, ILogger<QbitRequestProcessor> logger)
{
public async Task<IReadOnlyList<TorrentContent>?> ProcessAsync(string infoHash, CancellationToken cancellationToken = default)
{
@@ -14,7 +14,7 @@ public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService tr
using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
timeoutCts.CancelAfter(TimeSpan.FromSeconds(60));
timeoutCts.CancelAfter(TimeSpan.FromSeconds(30));
try
{
@@ -30,7 +30,7 @@ public class QbitRequestProcessor(IQBittorrentClient client, ITrackersService tr
break;
}
await Task.Delay(TimeSpan.FromMilliseconds(200), timeoutCts.Token);
await Task.Delay(TimeSpan.FromSeconds(1), timeoutCts.Token);
}
}
catch (OperationCanceledException) when (timeoutCts.IsCancellationRequested)

View File

@@ -5,10 +5,7 @@ public class QbitConfiguration
private const string Prefix = "QBIT";
private const string HOST_VARIABLE = "HOST";
private const string TRACKERS_URL_VARIABLE = "TRACKERS_URL";
private const string CONCURRENCY_VARIABLE = "CONCURRENCY";
public string? Host { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(HOST_VARIABLE);
public string? TrackersUrl { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(TRACKERS_URL_VARIABLE);
public int Concurrency { get; init; } = Prefix.GetEnvironmentVariableAsInt(CONCURRENCY_VARIABLE, 8);
}

View File

@@ -5,12 +5,6 @@ public class WriteQbitMetadataConsumer(IRankTorrentName rankTorrentName, IDataSt
public async Task Consume(ConsumeContext<WriteQbitMetadata> context)
{
var request = context.Message;
if (request.Metadata.Metadata.Count == 0)
{
await context.Publish(new QbitMetadataWritten(request.Metadata, false));
return;
}
var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(
rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);

View File

@@ -1 +1 @@
rank-torrent-name==0.2.13
rank-torrent-name==0.2.5

View File

@@ -8,14 +8,12 @@ public class PostgresConfiguration
private const string PasswordVariable = "PASSWORD";
private const string DatabaseVariable = "DB";
private const string PortVariable = "PORT";
private const string CommandTimeoutVariable = "COMMAND_TIMEOUT_SEC"; // Seconds
private string Host { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(HostVariable);
private string Username { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(UsernameVariable);
private string Password { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(PasswordVariable);
private string Database { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(DatabaseVariable);
private int PORT { get; init; } = Prefix.GetEnvironmentVariableAsInt(PortVariable, 5432);
private int CommandTimeout { get; init; } = Prefix.GetEnvironmentVariableAsInt(CommandTimeoutVariable, 300);
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};CommandTimeout={CommandTimeout}";
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};";
}

View File

@@ -152,8 +152,7 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
INSERT INTO files
("infoHash", "fileIndex", title, "size", "imdbId", "imdbSeason", "imdbEpisode", "kitsuId", "kitsuEpisode", "createdAt", "updatedAt")
VALUES
(@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now())
ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
(@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now());
""";
await connection.ExecuteAsync(query, files);
@@ -168,8 +167,7 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
INSERT INTO subtitles
("infoHash", "fileIndex", "fileId", "title")
VALUES
(@InfoHash, @FileIndex, @FileId, @Title)
ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
(@InfoHash, @FileIndex, @FileId, @Title);
""";
await connection.ExecuteAsync(query, subtitles);
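
Dropping ON CONFLICT ("infoHash", "fileIndex") DO NOTHING pairs with the deleted migration above that created the files_unique_infohash_fileindex and subtitles_unique_infohash_fileindex constraints: the clause only works when a matching unique constraint or index exists. For reference, a sketch of the idempotent form this reverts from, with a trimmed column list:

// Hedged sketch: insert-if-absent keyed on ("infoHash", "fileIndex").
const string upsert =
    """
    INSERT INTO files ("infoHash", "fileIndex", title, "size")
    VALUES (@InfoHash, @FileIndex, @Title, @Size)
    ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
    """;
await connection.ExecuteAsync(upsert, files);
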

View File

@@ -1,19 +0,0 @@
namespace SharedContracts.Extensions;
public static class DictionaryExtensions
{
public static ConcurrentDictionary<TKey, TValue> ToConcurrentDictionary<TSource, TKey, TValue>(
this IEnumerable<TSource> source,
Func<TSource, TKey> keySelector,
Func<TSource, TValue> valueSelector) where TKey : notnull
{
var concurrentDictionary = new ConcurrentDictionary<TKey, TValue>();
foreach (var element in source)
{
concurrentDictionary.TryAdd(keySelector(element), valueSelector(element));
}
return concurrentDictionary;
}
}
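
If this helper is ever needed again, the BCL covers most of it: ConcurrentDictionary has a constructor taking key/value pairs, though unlike the TryAdd loop above it throws on duplicate keys. A minimal sketch with a hypothetical torrents sequence:

// Hedged sketch: building the dictionary from pairs directly.
// Note: duplicate keys throw here, whereas TryAdd silently kept the first.
var byHash = new ConcurrentDictionary<string, string>(
    torrents.Select(t => new KeyValuePair<string, string>(t.InfoHash, t.Filename)));
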

View File

@@ -1,6 +1,5 @@
// Global using directives
global using System.Collections.Concurrent;
global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Text.RegularExpressions;

View File

@@ -2,6 +2,5 @@ namespace SharedContracts.Python.RTN;
public interface IRankTorrentName
{
ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false);
List<ParseTorrentTitleResponse?> BatchParse(IReadOnlyCollection<string> titles, int chunkSize = 500, int workers = 20, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false);
ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true);
}

View File

@@ -12,102 +12,41 @@ public class RankTorrentName : IRankTorrentName
_pythonEngineService = pythonEngineService;
InitModules();
}
public ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true) =>
_pythonEngineService.ExecutePythonOperationWithDefault(
() =>
{
var result = _rtn?.parse(title, trashGarbage);
return ParseResult(result);
}, new ParseTorrentTitleResponse(false, null), nameof(Parse), throwOnErrors: false, logErrors: false);
public ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false)
private static ParseTorrentTitleResponse ParseResult(dynamic result)
{
try
{
using var gil = Py.GIL();
var result = _rtn?.parse(title, trashGarbage);
return ParseResult(result);
}
catch (Exception ex)
{
if (logErrors)
{
_pythonEngineService.Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(Parse));
}
if (throwOnErrors)
{
throw;
}
return new(false, null);
}
}
public List<ParseTorrentTitleResponse?> BatchParse(IReadOnlyCollection<string> titles, int chunkSize = 500, int workers = 20, bool trashGarbage = true, bool logErrors = false, bool throwOnErrors = false)
{
var responses = new List<ParseTorrentTitleResponse?>();
try
{
if (titles.Count == 0)
{
return responses;
}
using var gil = Py.GIL();
var pythonList = new PyList(titles.Select(x => new PyString(x).As<PyObject>()).ToArray());
PyList results = _rtn?.batch_parse(pythonList, trashGarbage, chunkSize, workers);
if (results == null)
{
return responses;
}
responses.AddRange(results.Select(ParseResult));
}
catch (Exception ex)
{
if (logErrors)
{
_pythonEngineService.Logger.LogError(ex, "Python Error: {Message} ({OperationName})", ex.Message, nameof(Parse));
}
if (throwOnErrors)
{
throw;
}
}
return responses;
}
private static ParseTorrentTitleResponse? ParseResult(dynamic result)
{
try
{
if (result == null)
{
return new(false, null);
}
var json = result.model_dump_json()?.As<string?>();
if (json is null || string.IsNullOrEmpty(json))
{
return new(false, null);
}
var mediaType = result.GetAttr("type")?.As<string>();
if (string.IsNullOrEmpty(mediaType))
{
return new(false, null);
}
var response = JsonSerializer.Deserialize<RtnResponse>(json);
response.IsMovie = mediaType.Equals("movie", StringComparison.OrdinalIgnoreCase);
return new(true, response);
}
catch
if (result == null)
{
return new(false, null);
}
var json = result.model_dump_json()?.As<string?>();
if (json is null || string.IsNullOrEmpty(json))
{
return new(false, null);
}
var mediaType = result.GetAttr("type")?.As<string>();
if (string.IsNullOrEmpty(mediaType))
{
return new(false, null);
}
var response = JsonSerializer.Deserialize<RtnResponse>(json);
response.IsMovie = mediaType.Equals("movie", StringComparison.OrdinalIgnoreCase);
return new(true, response);
}
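
With BatchParse gone, callers are left with the single-title Parse, which returns a (Success, Response) pair and swallows Python errors into (false, null). A hypothetical call site:

// Hedged sketch: consuming the simplified Parse API.
var parsed = rankTorrentName.Parse("Show.S01E01.1080p.WEB-DL", trashGarbage: false);
var category = parsed.Success && parsed.Response.IsMovie ? "movie" : "tvSeries";
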
private void InitModules() =>

View File

@@ -15,7 +15,7 @@
<PackageReference Include="Dapper" Version="2.1.35" />
<PackageReference Include="MassTransit.Abstractions" Version="8.2.0" />
<PackageReference Include="MassTransit.RabbitMQ" Version="8.2.0" />
<PackageReference Include="Npgsql" Version="8.0.3" />
<PackageReference Include="Npgsql" Version="8.0.2" />
<PackageReference Include="pythonnet" Version="3.0.3" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="8.0.0" />

View File

@@ -8,14 +8,12 @@ public class PostgresConfiguration
private const string PasswordVariable = "PASSWORD";
private const string DatabaseVariable = "DB";
private const string PortVariable = "PORT";
private const string CommandTimeoutVariable = "COMMAND_TIMEOUT_SEC"; // Seconds
private string Host { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(HostVariable);
private string Username { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(UsernameVariable);
private string Password { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(PasswordVariable);
private string Database { get; init; } = Prefix.GetRequiredEnvironmentVariableAsString(DatabaseVariable);
private int PORT { get; init; } = Prefix.GetEnvironmentVariableAsInt(PortVariable, 5432);
private int CommandTimeout { get; init; } = Prefix.GetEnvironmentVariableAsInt(CommandTimeoutVariable, 300);
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};CommandTimeout={CommandTimeout}";
public string StorageConnectionString => $"Host={Host};Port={PORT};Username={Username};Password={Password};Database={Database};";
}

View File

@@ -12,7 +12,7 @@
<PackageReference Include="Dapper" Version="2.1.28" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Npgsql" Version="8.0.3" />
<PackageReference Include="Npgsql" Version="8.0.1" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />