Renames project to Knight Crawler

Gabisonfire
2024-02-03 23:21:20 -05:00
parent a8173a0ba7
commit 5fe3f471e9
19 changed files with 77 additions and 56 deletions


@@ -1,19 +1,26 @@
-# Selfhostio
+# Knight Crawler
+<img src="https://i.ibb.co/hYJPLdP/logo-only.png" alt="isolated" width="100"/>
 A self-hosted Stremio addon for streaming torrents via a debrid service.
 ## Contents
-- [Selfhostio](#selfhostio)
+**Note: Until we reach `v1.0.0`, please consider releases as alpha.**
+**Important: The latest change renames the project and requires a [small migration](#selfhostio-to-knightcrawler-migration).**
+- [Knight Crawler](#knight-crawler)
 - [Contents](#contents)
 - [Overview](#overview)
 - [Using](#using)
 - [Initial setup (optional)](#initial-setup-optional)
 - [Run the project](#run-the-project)
 - [Monitoring with Grafana and Prometheus (Optional)](#monitoring-with-grafana-and-prometheus-optional)
+- [Accessing RabbitMQ Management](#accessing-rabbitmq-management)
+- [Using Grafana and Prometheus](#using-grafana-and-prometheus)
 - [Importing external dumps](#importing-external-dumps)
 - [Import data into database](#import-data-into-database)
 - [INSERT INTO ingested\_torrents](#insert-into-ingested_torrents)
+- [Selfhostio to KnightCrawler Migration](#selfhostio-to-knightcrawler-migration)
 - [To-do](#to-do)
@@ -39,7 +46,7 @@ We can search DebridMediaManager hash lists which are hosted on GitHub. This all
 3. Fill out the form (example data below):
 ```
 Token name:
-Selfhostio
+KnightCrawler
 Expiration:
 90 days
 Description:
@@ -112,7 +119,7 @@ I created a file `db.load` as follows..
 ```
 load database
 from sqlite:///tmp/rarbg_db.sqlite
-into postgresql://postgres:postgres@localhost/selfhostio
+into postgresql://postgres:postgres@localhost/knightcrawler
 with include drop, create tables, create indexes, reset sequences
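For reference, a minimal sketch of running the load file above, assuming pgloader is installed on the host and Postgres is reachable on localhost as published by the compose file; the `db.load` path and connection details come from the README excerpt and may differ in your setup:

```
# pgloader reads the command file and migrates the SQLite dump
# into the knightcrawler Postgres database.
pgloader db.load
```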
@@ -131,6 +138,18 @@ SELECT title, 'RARBG', cat, hash, size, NULL, NULL, imdb, false, current_timesta
 FROM items where cat='tv' OR cat='movies';
 ```
+## Selfhostio to KnightCrawler Migration
+With the renaming of the project, you will have to change your database name in order to keep your existing data.
+**With your existing stack still running**, run:
+```
+docker exec -it torrentio-selfhostio-postgres-1 psql -c "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND datname = 'selfhostio'; ALTER DATABASE selfhostio RENAME TO knightcrawler;"
+```
+Make sure your postgres container is named `torrentio-selfhostio-postgres-1`, otherwise, adjust accordingly.
+This command should return: `ALTER DATABASE`. This means your database is now renamed. You can now pull the new changes if you haven't already and run `docker-compose up -d`.
 ## To-do
 - [ ] Add a section on external access
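A minimal sketch for confirming the rename before starting the renamed stack, assuming the container is still called `torrentio-selfhostio-postgres-1` as in the migration note above (adjust if yours differs):

```
# List databases; 'knightcrawler' should now be present and 'selfhostio' gone.
docker exec -it torrentio-selfhostio-postgres-1 psql -U postgres -l
```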


@@ -1,5 +1,5 @@
 version: '3.8'
-name: torrentio-metrics
+name: knightcrawler-metrics
 services:
   prometheus:
@@ -11,7 +11,7 @@ services:
     ports:
       - "9090:9090"
     networks:
-      - torrentio-network
+      - knightcrawler-network
   grafana:
     image: grafana/grafana:latest
@@ -25,17 +25,17 @@ services:
     depends_on:
       - prometheus
     networks:
-      - torrentio-network
+      - knightcrawler-network
   postgres-exporter:
     image: prometheuscommunity/postgres-exporter
     ports:
       - "9187:9187"
     environment:
-      DATA_SOURCE_NAME: "postgresql://postgres:postgres@postgres:5432/selfhostio?sslmode=disable"
+      DATA_SOURCE_NAME: "postgresql://postgres:postgres@postgres:5432/knightcrawler?sslmode=disable"
     networks:
-      - torrentio-network
+      - knightcrawler-network
 networks:
-  torrentio-network:
+  knightcrawler-network:
     external: true
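Because this compose file marks `knightcrawler-network` as `external: true`, the network has to exist before the metrics stack starts; the main stack creates it. A hedged sketch for checking it, or creating it manually if you run the metrics stack on its own (the bridge driver here is an assumption taken from the main compose file):

```
# Create the network only if it does not already exist.
docker network inspect knightcrawler-network >/dev/null 2>&1 \
  || docker network create --driver bridge knightcrawler-network
```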


@@ -1,5 +1,5 @@
 version: '3.8'
-name: torrentio-selfhostio
+name: knightcrawler
 x-restart: &restart-policy
   "unless-stopped"
@@ -22,7 +22,7 @@ x-postgreshealth: &postgresdb-health
   test: pg_isready
   <<: *base-health
-x-apps: &selfhostio-app
+x-apps: &knightcrawler-app
   depends_on:
     mongodb:
       condition: service_healthy
@@ -38,7 +38,7 @@ services:
     environment:
       POSTGRES_USER: postgres
       POSTGRES_PASSWORD: postgres
-      POSTGRES_DB: selfhostio
+      POSTGRES_DB: knightcrawler
       PGUSER: postgres # needed for healthcheck.
     ports:
       - "5432:5432"
@@ -47,7 +47,7 @@ services:
     healthcheck: *postgresdb-health
     restart: *restart-policy
     networks:
-      - torrentio-network
+      - knightcrawler-network
   mongodb:
     image: mongo:latest
@@ -61,7 +61,7 @@
     restart: *restart-policy
     healthcheck: *mongodb-health
     networks:
-      - torrentio-network
+      - knightcrawler-network
   rabbitmq:
     image: rabbitmq:3-management
@@ -74,7 +74,7 @@
     restart: *restart-policy
     healthcheck: *rabbitmq-health
     networks:
-      - torrentio-network
+      - knightcrawler-network
   producer:
     build:
@@ -82,9 +82,9 @@
       dockerfile: Dockerfile
     env_file:
       - env/producer.env
-    <<: *selfhostio-app
+    <<: *knightcrawler-app
     networks:
-      - torrentio-network
+      - knightcrawler-network
   consumer:
     build:
@@ -94,9 +94,9 @@
       - env/consumer.env
     deploy:
       replicas: 3
-    <<: *selfhostio-app
+    <<: *knightcrawler-app
     networks:
-      - torrentio-network
+      - knightcrawler-network
   addon:
     build:
@@ -106,14 +106,14 @@
       - "7000:7000"
     env_file:
       - env/addon.env
-    <<: *selfhostio-app
+    <<: *knightcrawler-app
     networks:
-      - torrentio-network
+      - knightcrawler-network
 networks:
-  torrentio-network:
+  knightcrawler-network:
     driver: bridge
-    name: torrentio-network
+    name: knightcrawler-network
 volumes:
   postgres:
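Since the compose project name changes from `torrentio-selfhostio` to `knightcrawler`, containers started under the old project are not replaced by `docker-compose up -d` and will keep holding their published ports. A hedged cleanup sketch, assuming the old project name shown above and that you want to keep its named volumes:

```
# Stop and remove the old project's containers and networks.
# Omitting -v keeps named volumes (e.g. the postgres data) intact.
docker-compose -p torrentio-selfhostio down
```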

env/addon.env

@@ -1,4 +1,4 @@
 TZ=London/Europe
-DATABASE_URI=postgres://postgres:postgres@postgres/selfhostio
-MONGODB_URI=mongodb://mongo:mongo@mongodb/selfhostio?tls=false&authSource=admin
+DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler
+MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin
 DEBUG_MODE=false

env/consumer.env

@@ -1,6 +1,6 @@
 TZ=London/Europe
-MONGODB_URI=mongodb://mongo:mongo@mongodb/selfhostio?tls=false&authSource=admin
-DATABASE_URI=postgres://postgres:postgres@postgres/selfhostio
+MONGODB_URI=mongodb://mongo:mongo@mongodb/knightcrawler?tls=false&authSource=admin
+DATABASE_URI=postgres://postgres:postgres@postgres/knightcrawler
 RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30
 QUEUE_NAME=ingested
 JOB_CONCURRENCY=5

env/producer.env

@@ -1,4 +1,4 @@
-ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=selfhostio;
+ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=knightcrawler;
 RabbitMqConfiguration__Host=rabbitmq
 RabbitMqConfiguration__QueueName=ingested
 RabbitMqConfiguration__Username=guest


@@ -6,7 +6,7 @@ import { BadTokenError, AccessDeniedError, sameFilename } from './mochHelper.js'
 import StaticResponse from './static.js';
 const KEY = 'alldebrid';
-const AGENT = 'torrentio';
+const AGENT = 'knightcrawler';
 export async function getCachedStreams(streams, apiKey) {
   const options = await getDefaultOptions();


@@ -224,14 +224,14 @@ function blackListToken(token, mochKey) {
 function errorStreamResponse(mochKey, error, config) {
   if (error === BadTokenError) {
     return {
-      name: `Torrentio\n${MochOptions[mochKey].shortName} error`,
+      name: `Knightcrawler\n${MochOptions[mochKey].shortName} error`,
       title: `Invalid ${MochOptions[mochKey].name} ApiKey/Token!`,
       url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
     };
   }
   if (error === AccessDeniedError) {
     return {
-      name: `Torrentio\n${MochOptions[mochKey].shortName} error`,
+      name: `Knightcrawler\n${MochOptions[mochKey].shortName} error`,
       title: `Expired/invalid ${MochOptions[mochKey].name} subscription!`,
       url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
     };


@@ -1,7 +1,7 @@
 module.exports = {
   apps: [
     {
-      name: "torrentio-selfhostio",
+      name: "knightcrawler",
       script: "npm start",
       cwd: "/app",
       watch: ["./dist/index.cjs"],


@@ -1,11 +1,11 @@
 {
-  "name": "selfhostio-addon",
+  "name": "knightcrawler-addon",
   "version": "0.0.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
-      "name": "selfhostio-addon",
+      "name": "knightcrawler-addon",
       "version": "0.0.1",
       "dependencies": {
         "@putdotio/api-client": "^8.42.0",


@@ -1,5 +1,5 @@
 {
-  "name": "selfhostio-addon",
+  "name": "knightcrawler-addon",
   "version": "0.0.1",
   "type": "module",
   "scripts": {


@@ -44,7 +44,7 @@ builder.defineStreamHandler((args) => {
 builder.defineCatalogHandler((args) => {
-  const mochKey = args.id.replace("selfhostio-", '');
+  const mochKey = args.id.replace("knightcrawler-", '');
   console.log(`Incoming catalog ${args.id} request with skip=${args.extra.skip || 0}`)
   return getMochCatalog(mochKey, args.extra)
       .then(metas => ({


@@ -2,7 +2,7 @@ import cacheManager from 'cache-manager';
 import mangodbStore from 'cache-manager-mongodb';
 import { isStaticUrl } from '../moch/static.js';
-const GLOBAL_KEY_PREFIX = 'selfhostio-addon';
+const GLOBAL_KEY_PREFIX = 'knightcrawler-addon';
 const STREAM_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|stream`;
 const AVAILABILITY_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|availability`;
 const RESOLVED_URL_KEY_PREFIX = `${GLOBAL_KEY_PREFIX}|resolved`;
@@ -28,7 +28,7 @@ function initiateRemoteCache() {
       store: mangodbStore,
       uri: MONGO_URI,
       options: {
-        collection: 'selfhostio_addon_collection',
+        collection: 'knightcrawler_addon_collection',
         socketTimeoutMS: 120000,
         useNewUrlParser: true,
         useUnifiedTopology: false,


@@ -6,13 +6,15 @@ const CatalogMochs = Object.values(MochOptions).filter(moch => moch.catalog);
 export function manifest(config = {}) {
   return {
-    id: 'com.stremio.selfhostio.selfhostio',
-    version: '0.0.1',
+    id: 'com.stremio.knightcrawler.knightcrawler',
+    version: 'v0.0.1',
     name: getName(config),
-    description: getDescription(config),
+    description: getDescription(),
     catalogs: getCatalogs(config),
     resources: getResources(config),
     types: [Type.MOVIE, Type.SERIES, Type.ANIME, Type.OTHER],
+    background: "https://i.ibb.co/9pXGycn/logo-color.png",
+    logo: "https://i.ibb.co/hYJPLdP/logo-only.png",
     behaviorHints: {
       configurable: true,
       configurationRequired: false,
@@ -28,7 +30,7 @@ export function dummyManifest() {
 }
 function getName(config) {
-  const rootName = 'selfhostio';
+  const rootName = 'Knight Crawler';
   const mochSuffix = Object.values(MochOptions)
       .filter(moch => config[moch.key])
       .map(moch => moch.shortName)
@@ -36,15 +38,15 @@ function getName(config) {
   return [rootName, mochSuffix].filter(v => v).join(' ');
 }
-function getDescription(config) {
-  return 'Selfhostio the Torrentio brings you much Funio';
+function getDescription() {
+  return 'Selfhost the Torrentio brings you much Funio';
 }
 function getCatalogs(config) {
   return CatalogMochs
       .filter(moch => showDebridCatalog(config) && config[moch.key])
       .map(moch => ({
-        id: `selfhostio-${moch.key}`,
+        id: `knightcrawler-${moch.key}`,
         name: `${moch.name}`,
         type: 'other',
         extra: [{ name: 'skip' }],


@@ -4,7 +4,7 @@ import { getSources } from './magnetHelper.js';
 import { getSubtitles } from './subtitles.js';
 import { Type } from './types.js';
-const ADDON_NAME = 'selfhostio';
+const ADDON_NAME = 'knightcrawler';
 const SIZE_DELTA = 0.02;
 const UNKNOWN_SIZE = 300000000;
 const CAM_SOURCES = ['CAM', 'TeleSync', 'TeleCine', 'SCR'];
@@ -38,7 +38,7 @@ export function toStreamInfo(record) {
       '\n'
   );
   const bingeGroupParts = getBingeGroupParts(record, sameInfo, quality, torrentInfo, fileInfo);
-  const bingeGroup = joinDetailParts(bingeGroupParts, "selfhostio|", "|")
+  const bingeGroup = joinDetailParts(bingeGroupParts, "knightcrawler|", "|")
   const behaviorHints = bingeGroup ? { bingeGroup } : undefined;
   return cleanOutputObject({


@@ -6,7 +6,7 @@ import { BadTokenError, AccessDeniedError, sameFilename } from './mochHelper.js'
 import StaticResponse from './static.js';
 const KEY = 'alldebrid';
-const AGENT = 'selfhostio';
+const AGENT = 'knightcrawler';
 export async function getCachedStreams(streams, apiKey) {
   const options = await getDefaultOptions();


@@ -224,14 +224,14 @@ function blackListToken(token, mochKey) {
 function errorStreamResponse(mochKey, error, config) {
   if (error === BadTokenError) {
     return {
-      name: `Selfhostio\n${MochOptions[mochKey].shortName} error`,
+      name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
       title: `Invalid ${MochOptions[mochKey].name} ApiKey/Token!`,
       url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
     };
   }
   if (error === AccessDeniedError) {
     return {
-      name: `Selfhostio\n${MochOptions[mochKey].shortName} error`,
+      name: `KnightCrawler\n${MochOptions[mochKey].shortName} error`,
       title: `Expired/invalid ${MochOptions[mochKey].name} subscription!`,
       url: `${config.host}/${StaticResponse.FAILED_ACCESS}`
     };


@@ -4,7 +4,7 @@ import { cacheConfig } from './config.js';
 import { logger } from './logger.js';
 import { CacheType } from "./types.js";
-const GLOBAL_KEY_PREFIX = 'selfhostio-consumer';
+const GLOBAL_KEY_PREFIX = 'knightcrawler-consumer';
 const IMDB_ID_PREFIX = `${GLOBAL_KEY_PREFIX}|imdb_id`;
 const KITSU_ID_PREFIX = `${GLOBAL_KEY_PREFIX}|kitsu_id`;
 const METADATA_PREFIX = `${GLOBAL_KEY_PREFIX}|metadata`;
@@ -26,7 +26,7 @@ const initiateMongoCache = () => {
     url: cacheConfig.MONGO_URI,
     mongoConfig: {
       socketTimeoutMS: 120000,
-      appName: 'selfhostio-consumer',
+      appName: 'knightcrawler-consumer',
     }
   });


@@ -4,13 +4,13 @@
 }
 export const cacheConfig = {
-  MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/selfhostio?authSource=admin',
+  MONGO_URI: process.env.MONGODB_URI || 'mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin',
   NO_CACHE: parseBool(process.env.NO_CACHE, false),
-  COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'selfhostio_consumer_collection'
+  COLLECTION_NAME: process.env.MONGODB_COLLECTION || 'knightcrawler_consumer_collection'
 }
 export const databaseConfig = {
-  DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/selfhostio',
+  DATABASE_URI: process.env.DATABASE_URI || 'postgres://postgres:postgres@localhost:5432/knightcrawler',
   ENABLE_SYNC: parseBool(process.env.ENABLE_SYNC, true)
 }
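The defaults above only apply when the corresponding variables are unset. A minimal sketch of the environment a consumer run outside Docker would need to target the renamed database and collection, assuming the compose file's ports are published on localhost:

```
# Hypothetical local-development overrides matching the renamed defaults.
export MONGODB_URI="mongodb://mongo:mongo@localhost:27017/knightcrawler?authSource=admin"
export MONGODB_COLLECTION="knightcrawler_consumer_collection"
export DATABASE_URI="postgres://postgres:postgres@localhost:5432/knightcrawler"
```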