ioc implemented

@@ -13,7 +13,7 @@ try {
build({
bundle: true,
entryPoints: [
"./src/index.ts",
"./src/main.ts",
],
external: [...(devDependencies && Object.keys(devDependencies))],
keepNames: true,

6 src/node/consumer/package-lock.json generated
@@ -15,6 +15,7 @@
"bottleneck": "^2.19.5",
"cache-manager": "^5.4.0",
"google-sr": "^3.2.1",
"inversify": "^6.0.2",
"magnet-uri": "^6.2.0",
"moment": "^2.30.1",
"name-to-imdb": "^3.0.4",
@@ -2833,6 +2834,11 @@
"node": ">= 0.4"
}
},
"node_modules/inversify": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/inversify/-/inversify-6.0.2.tgz",
"integrity": "sha512-i9m8j/7YIv4mDuYXUAcrpKPSaju/CIly9AHK5jvCBeoiM/2KEsuCQTTP+rzSWWpLYWRukdXFSl6ZTk2/uumbiA=="
},
"node_modules/ip": {
"version": "1.1.8",
"license": "MIT"
@@ -4,7 +4,7 @@
"type": "module",
"scripts": {
"build": "node esbuild.js",
"dev": "tsx watch --ignore node_modules src/index.ts | pino-pretty",
"dev": "tsx watch --ignore node_modules src/main.ts | pino-pretty",
"start": "node dist/index.cjs",
"lint": "npx eslint ./src --ext .ts,.js"
},
@@ -16,6 +16,7 @@
"bottleneck": "^2.19.5",
"cache-manager": "^5.4.0",
"google-sr": "^3.2.1",
"inversify": "^6.0.2",
"magnet-uri": "^6.2.0",
"moment": "^2.30.1",
"name-to-imdb": "^3.0.4",
@@ -1,9 +0,0 @@
import { processTorrentsJob } from './jobs/process_torrents_job.js';
import { repository } from "./repository/database_repository";
import { trackerService } from "./lib/services/tracker_service";

(async () => {
await trackerService.getTrackers();
await repository.connect();
await processTorrentsJob.listenToQueue();
})();
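The bootstrap above is deleted in this commit; per the package.json and esbuild changes, its replacement is a new src/main.ts entry point that is not shown in this diff. A minimal sketch of what that entry point would look like, assuming the main.ts name and the serviceContainer / ICompositionalRoot bindings introduced further down:

// Hypothetical sketch of src/main.ts - resolve the composition root from the container
// instead of importing concrete singletons.
import { serviceContainer } from "./lib/models/inversify_config"; // also pulls in reflect-metadata
import { IocTypes } from "./lib/models/ioc_types";
import { ICompositionalRoot } from "./lib/interfaces/composition_root";

(async (): Promise<void> => {
    const root = serviceContainer.get<ICompositionalRoot>(IocTypes.ICompositionalRoot);
    await root.start();
})();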
3 src/node/consumer/src/interfaces/process_torrents_job.ts Normal file
@@ -0,0 +1,3 @@
export interface IProcessTorrentsJob {
listenToQueue: () => Promise<void>;
}
@@ -2,14 +2,26 @@
import {IIngestedRabbitMessage, IIngestedRabbitTorrent} from "../lib/interfaces/ingested_rabbit_message";
import {IIngestedTorrentAttributes} from "../repository/interfaces/ingested_torrent_attributes";
import {configurationService} from '../lib/services/configuration_service';
import {torrentProcessingService} from '../lib/services/torrent_processing_service';
import {logger} from '../lib/services/logging_service';
import {inject, injectable} from "inversify";
import {IocTypes} from "../lib/models/ioc_types";
import {ITorrentProcessingService} from "../lib/interfaces/torrent_processing_service";
import {ILoggingService} from "../lib/interfaces/logging_service";
import {IProcessTorrentsJob} from "../interfaces/process_torrents_job";

class ProcessTorrentsJob {
@injectable()
export class ProcessTorrentsJob implements IProcessTorrentsJob {
private readonly assertQueueOptions: Options.AssertQueue = {durable: true};
private readonly consumeQueueOptions: Options.Consume = {noAck: false};
private torrentProcessingService: ITorrentProcessingService;
private logger: ILoggingService;

constructor(@inject(IocTypes.ITorrentProcessingService) torrentProcessingService: ITorrentProcessingService,
@inject(IocTypes.ILoggingService) logger: ILoggingService){
this.torrentProcessingService = torrentProcessingService;
this.logger = logger;
}

public listenToQueue = async ()=> {
public listenToQueue = async () => {
if (!configurationService.jobConfig.JOBS_ENABLED) {
return;
}
@@ -19,12 +31,12 @@ class ProcessTorrentsJob {
const channel: Channel = await connection.createChannel();
await this.assertAndConsumeQueue(channel);
} catch (error) {
logger.error('Failed to connect and setup channel', error);
this.logger.error('Failed to connect and setup channel', error);
}
}
private processMessage = (msg: ConsumeMessage) => {
const ingestedTorrent: IIngestedTorrentAttributes = this.getMessageAsJson(msg);
return torrentProcessingService.processTorrentRecord(ingestedTorrent);
return this.torrentProcessingService.processTorrentRecord(ingestedTorrent);
};
private getMessageAsJson = (msg: ConsumeMessage): IIngestedTorrentAttributes => {
const content = msg?.content.toString('utf8') ?? "{}";
@@ -32,15 +44,15 @@ class ProcessTorrentsJob {
const receivedTorrent: IIngestedRabbitTorrent = receivedObject.message;
return {...receivedTorrent, info_hash: receivedTorrent.infoHash};
};
private async assertAndConsumeQueue(channel: Channel) {
logger.info('Worker is running! Waiting for new torrents...');
private assertAndConsumeQueue = async (channel: Channel) => {
this.logger.info('Worker is running! Waiting for new torrents...');

const ackMsg = async (msg: ConsumeMessage) => {
try {
await this.processMessage(msg);
channel.ack(msg);
} catch (error) {
logger.error('Failed processing torrent', error);
this.logger.error('Failed processing torrent', error);
}
}

@@ -49,13 +61,7 @@ class ProcessTorrentsJob {
await channel.prefetch(configurationService.jobConfig.JOB_CONCURRENCY);
await channel.consume(configurationService.rabbitConfig.QUEUE_NAME, ackMsg, this.consumeQueueOptions);
} catch (error) {
logger.error('Failed to setup channel', error);
this.logger.error('Failed to setup channel', error);
}
}

private test() {

}
}

export const processTorrentsJob = new ProcessTorrentsJob();
};
}
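Because the job now receives its collaborators through the constructor instead of importing module-level singletons, it can be exercised in isolation. A small sketch, assuming hand-written stubs (the stub shapes below are illustrative, not part of the commit):

// Illustrative test wiring: build the job directly with stubs, no container needed.
const stubProcessingService = {
    processTorrentRecord: async () => undefined, // no-op stand-in
} as unknown as ITorrentProcessingService;
const stubLogger = {
    info: () => undefined, error: () => undefined,
    debug: () => undefined, warn: () => undefined,
} as unknown as ILoggingService;

const job = new ProcessTorrentsJob(stubProcessingService, stubLogger);
await job.listenToQueue(); // returns early when JOBS_ENABLED is false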
3 src/node/consumer/src/lib/interfaces/composition_root.ts Normal file
@@ -0,0 +1,3 @@
export interface ICompositionalRoot {
start(): Promise<void>;
}
26 src/node/consumer/src/lib/models/composition_root.ts Normal file
@@ -0,0 +1,26 @@
import {inject, injectable} from "inversify";
import {IDatabaseRepository} from "../../repository/interfaces/database_repository";
import {ITrackerService} from "../interfaces/tracker_service";
import {IProcessTorrentsJob} from "../../interfaces/process_torrents_job";
import {ICompositionalRoot} from "../interfaces/composition_root";
import {IocTypes} from "./ioc_types";

@injectable()
export class CompositionalRoot implements ICompositionalRoot {
private trackerService: ITrackerService;
private databaseRepository: IDatabaseRepository;
private processTorrentsJob: IProcessTorrentsJob;
constructor(@inject(IocTypes.ITrackerService) trackerService: ITrackerService,
@inject(IocTypes.IDatabaseRepository) databaseRepository: IDatabaseRepository,
@inject(IocTypes.IProcessTorrentsJob) processTorrentsJob: IProcessTorrentsJob) {
this.trackerService = trackerService;
this.databaseRepository = databaseRepository;
this.processTorrentsJob = processTorrentsJob;
}

start = async () => {
await this.trackerService.getTrackers();
await this.databaseRepository.connect();
await this.processTorrentsJob.listenToQueue();
};
}
44 src/node/consumer/src/lib/models/inversify_config.ts Normal file
@@ -0,0 +1,44 @@
import "reflect-metadata"; // required
import {Container} from "inversify";
import { IocTypes } from "./ioc_types";
import {ICacheService} from "../interfaces/cache_service";
import {ILoggingService} from "../interfaces/logging_service";
import {IMetadataService} from "../interfaces/metadata_service";
import {ITorrentFileService} from "../interfaces/torrent_file_service";
import {ITorrentProcessingService} from "../interfaces/torrent_processing_service";
import {ITorrentSubtitleService} from "../interfaces/torrent_subtitle_service";
import {ITorrentEntriesService} from "../interfaces/torrent_entries_service";
import {ITorrentDownloadService} from "../interfaces/torrent_download_service";
import {ITrackerService} from "../interfaces/tracker_service";
import {IProcessTorrentsJob} from "../../interfaces/process_torrents_job";
import {ICompositionalRoot} from "../interfaces/composition_root";
import {IDatabaseRepository} from "../../repository/interfaces/database_repository";
import {CompositionalRoot} from "./composition_root";
import {CacheService} from "../services/cache_service";
import {LoggingService} from "../services/logging_service";
import {MetadataService} from "../services/metadata_service";
import {TorrentDownloadService} from "../services/torrent_download_service";
import {TorrentEntriesService} from "../services/torrent_entries_service";
import {TorrentProcessingService} from "../services/torrent_processing_service";
import {TorrentFileService} from "../services/torrent_file_service";
import {TorrentSubtitleService} from "../services/torrent_subtitle_service";
import {TrackerService} from "../services/tracker_service";
import {DatabaseRepository} from "../../repository/database_repository";
import {ProcessTorrentsJob} from "../../jobs/process_torrents_job";

const serviceContainer = new Container();

serviceContainer.bind<ICompositionalRoot>(IocTypes.ICompositionalRoot).to(CompositionalRoot).inSingletonScope();
serviceContainer.bind<ICacheService>(IocTypes.ICacheService).to(CacheService).inSingletonScope();
serviceContainer.bind<ITorrentFileService>(IocTypes.ITorrentFileService).to(TorrentFileService);
serviceContainer.bind<ITorrentProcessingService>(IocTypes.ITorrentProcessingService).to(TorrentProcessingService);
serviceContainer.bind<ITorrentSubtitleService>(IocTypes.ITorrentSubtitleService).to(TorrentSubtitleService);
serviceContainer.bind<ITorrentEntriesService>(IocTypes.ITorrentEntriesService).to(TorrentEntriesService);
serviceContainer.bind<ITorrentDownloadService>(IocTypes.ITorrentDownloadService).to(TorrentDownloadService);
serviceContainer.bind<ILoggingService>(IocTypes.ILoggingService).to(LoggingService);
serviceContainer.bind<IMetadataService>(IocTypes.IMetadataService).to(MetadataService);
serviceContainer.bind<ITrackerService>(IocTypes.ITrackerService).to(TrackerService);
serviceContainer.bind<IDatabaseRepository>(IocTypes.IDatabaseRepository).to(DatabaseRepository);
serviceContainer.bind<IProcessTorrentsJob>(IocTypes.IProcessTorrentsJob).to(ProcessTorrentsJob);

export { serviceContainer };
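A note on scopes in the bindings above: CompositionalRoot and CacheService are registered inSingletonScope, while the remaining bindings use InversifyJS's default transient scope, so each resolution builds a fresh instance. A small sketch of the observable difference:

// Singleton-scoped binding: both lookups return the same instance.
const cacheA = serviceContainer.get<ICacheService>(IocTypes.ICacheService);
const cacheB = serviceContainer.get<ICacheService>(IocTypes.ICacheService);
console.log(cacheA === cacheB); // true

// Transient (default) binding: each lookup constructs a new instance.
const loggerA = serviceContainer.get<ILoggingService>(IocTypes.ILoggingService);
const loggerB = serviceContainer.get<ILoggingService>(IocTypes.ILoggingService);
console.log(loggerA === loggerB); // false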
18 src/node/consumer/src/lib/models/ioc_types.ts Normal file
@@ -0,0 +1,18 @@
export const IocTypes = {
// Composition root
ICompositionalRoot: Symbol.for("ICompositionalRoot"),
// Services
ICacheService: Symbol.for("ICacheService"),
ILoggingService: Symbol.for("ILoggingService"),
IMetadataService: Symbol.for("IMetadataService"),
ITorrentDownloadService: Symbol.for("ITorrentDownloadService"),
ITorrentEntriesService: Symbol.for("ITorrentEntriesService"),
ITorrentFileService: Symbol.for("ITorrentFileService"),
ITorrentProcessingService: Symbol.for("ITorrentProcessingService"),
ITorrentSubtitleService: Symbol.for("ITorrentSubtitleService"),
ITrackerService: Symbol.for("ITrackerService"),
// DAL
IDatabaseRepository: Symbol.for("IDatabaseRepository"),
// Jobs
IProcessTorrentsJob: Symbol.for("IProcessTorrentsJob"),
};
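These tokens rely on the global symbol registry: Symbol.for() returns the same symbol for the same key no matter where it is called, which is what lets the container bindings and the @inject decorators agree on an identifier across modules. A quick illustration:

// Symbol.for() is idempotent per key; a bare Symbol() never is.
const a = Symbol.for("ILoggingService");
const b = Symbol.for("ILoggingService");
console.log(a === b);                          // true - same registry entry
console.log(a === Symbol("ILoggingService"));  // false - plain Symbol() is always unique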
@@ -1,10 +1,12 @@
import {Cache, createCache, memoryStore} from 'cache-manager';
import {mongoDbStore} from '@tirke/node-cache-manager-mongodb'
import {configurationService} from './configuration_service';
import {logger} from './logging_service';
import {CacheType} from "../enums/cache_types";
import {ICacheOptions} from "../interfaces/cache_options";
import {ICacheService} from "../interfaces/cache_service";
import {inject, injectable} from "inversify";
import {IocTypes} from "../models/ioc_types";
import {ILoggingService} from "../interfaces/logging_service";

const GLOBAL_KEY_PREFIX = 'knightcrawler-consumer';
const IMDB_ID_PREFIX = `${GLOBAL_KEY_PREFIX}|imdb_id`;
@@ -18,10 +20,13 @@ const TRACKERS_TTL: number = 2 * 24 * 60 * 60; // 2 days

export type CacheMethod = () => any;

class CacheService implements ICacheService {
constructor() {
@injectable()
export class CacheService implements ICacheService {
private logger: ILoggingService;
constructor(@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.logger = logger;
if (!configurationService.cacheConfig.NO_CACHE) {
logger.info('Cache is disabled');
this.logger.info('Cache is disabled');
return;
}

@@ -64,7 +69,7 @@ class CacheService implements ICacheService {

private initiateRemoteCache = (): Cache => {
if (configurationService.cacheConfig.NO_CACHE) {
logger.debug('Cache is disabled');
this.logger.debug('Cache is disabled');
return null;
}

@@ -93,13 +98,11 @@ class CacheService implements ICacheService {
return method();
}

logger.debug(`Cache type: ${cacheType}`);
logger.debug(`Cache key: ${key}`);
logger.debug(`Cache options: ${JSON.stringify(options)}`);
this.logger.debug(`Cache type: ${cacheType}`);
this.logger.debug(`Cache key: ${key}`);
this.logger.debug(`Cache options: ${JSON.stringify(options)}`);

return cache.wrap(key, method, options.ttl);
}
}

export const cacheService: CacheService = new CacheService();
@@ -1,7 +1,9 @@
import {Logger, pino} from "pino";
import {ILoggingService} from "../interfaces/logging_service";
import {injectable} from "inversify";

class LoggingService implements ILoggingService {
@injectable()
export class LoggingService implements ILoggingService {
private readonly logger: Logger;

constructor() {
@@ -10,21 +12,19 @@ class LoggingService implements ILoggingService {
});
}

public info(message: string, ...args: any[]): void {
public info = (message: string, ...args: any[]): void => {
this.logger.info(message, args);
}
};

public error(message: string, ...args: any[]): void {
public error = (message: string, ...args: any[]): void => {
this.logger.error(message, args);
}
};

public debug(message: string, ...args: any[]): void {
public debug = (message: string, ...args: any[]): void => {
this.logger.debug(message, args);
}
};

public warn(message: string, ...args: any[]): void {
public warn = (message: string, ...args: any[]): void => {
this.logger.warn(message, args);
}
}

export const logger = new LoggingService();
};
}
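The recurring change in this file (and in the services below) from regular methods to arrow-function properties keeps `this` bound to the instance even when a method is handed around as a bare callback, which matters once instances are built by the container and their methods are passed to queue or HTTP handlers. A minimal illustration of the difference:

// Arrow property vs. method when detached from the instance:
class Example {
    private prefix = "[consumer]";
    method(msg: string): string { return `${this.prefix} ${msg}`; } // `this` depends on the call site
    arrow = (msg: string): string => `${this.prefix} ${msg}`;       // `this` is captured at construction
}

const detached = new Example().arrow;
console.log(detached("ok")); // "[consumer] ok"; the detached method form would throw instead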
@@ -1,7 +1,6 @@
import axios, {AxiosResponse} from 'axios';
import {ResultTypes, search} from 'google-sr';
import nameToImdb from 'name-to-imdb';
import {cacheService} from './cache_service';
import {TorrentType} from '../enums/torrent_types';
import {IMetadataResponse} from "../interfaces/metadata_response";
import {ICinemetaJsonResponse} from "../interfaces/cinemeta_metadata";
@@ -10,20 +9,29 @@ import {IKitsuJsonResponse} from "../interfaces/kitsu_metadata";
import {IMetaDataQuery} from "../interfaces/metadata_query";
import {IKitsuCatalogJsonResponse} from "../interfaces/kitsu_catalog_metadata";
import {IMetadataService} from "../interfaces/metadata_service";
import {inject, injectable} from "inversify";
import {IocTypes} from "../models/ioc_types";
import {ICacheService} from "../interfaces/cache_service";

const CINEMETA_URL = 'https://v3-cinemeta.strem.io';
const KITSU_URL = 'https://anime-kitsu.strem.fun';
const TIMEOUT = 20000;

class MetadataService implements IMetadataService {
public async getKitsuId(info: IMetaDataQuery): Promise<string | Error> {
@injectable()
export class MetadataService implements IMetadataService {
private cacheService: ICacheService;
constructor(@inject(IocTypes.ICacheService) cacheService: ICacheService) {
this.cacheService = cacheService;
}

public getKitsuId = async (info: IMetaDataQuery): Promise<string | Error> => {
const title = this.escapeTitle(info.title.replace(/\s\|\s.*/, ''));
const year = info.year ? ` ${info.year}` : '';
const season = info.season > 1 ? ` S${info.season}` : '';
const key = `${title}${year}${season}`;
const query = encodeURIComponent(key);

return cacheService.cacheWrapKitsuId(key,
return this.cacheService.cacheWrapKitsuId(key,
() => axios.get(`${KITSU_URL}/catalog/series/kitsu-anime-list/search=${query}.json`, {timeout: 60000})
.then((response) => {
const body = response.data as IKitsuCatalogJsonResponse;
@@ -33,9 +41,9 @@ class MetadataService implements IMetadataService {
throw new Error('No search results');
}
}));
}
};

public async getImdbId(info: IMetaDataQuery): Promise<string | undefined> {
public getImdbId = async (info: IMetaDataQuery): Promise<string | undefined> => {
const name = this.escapeTitle(info.title);
const year = info.year || (info.date && info.date.slice(0, 4));
const key = `${name}_${year || 'NA'}_${info.type}`;
@@ -44,7 +52,7 @@ class MetadataService implements IMetadataService {
const googleQuery = year ? query : fallbackQuery;

try {
const imdbId = await cacheService.cacheWrapImdbId(key,
const imdbId = await this.cacheService.cacheWrapImdbId(key,
() => this.getIMDbIdFromNameToImdb(name, info)
);
return imdbId && 'tt' + imdbId.replace(/tt0*([1-9][0-9]*)$/, '$1').padStart(7, '0');
@@ -52,16 +60,16 @@ class MetadataService implements IMetadataService {
const imdbIdFallback = await this.getIMDbIdFromGoogle(googleQuery);
return imdbIdFallback && 'tt' + imdbIdFallback.toString().replace(/tt0*([1-9][0-9]*)$/, '$1').padStart(7, '0');
}
}
};

public getMetadata(query: IMetaDataQuery): Promise<IMetadataResponse | Error> {
public getMetadata = (query: IMetaDataQuery): Promise<IMetadataResponse | Error> => {
if (!query.id) {
return Promise.reject("no valid id provided");
}

const key = Number.isInteger(query.id) || query.id.toString().match(/^\d+$/) ? `kitsu:${query.id}` : query.id;
const metaType = query.type === TorrentType.Movie ? TorrentType.Movie : TorrentType.Series;
return cacheService.cacheWrapMetadata(key.toString(), () => this.requestMetadata(`${KITSU_URL}/meta/${metaType}/${key}.json`)
return this.cacheService.cacheWrapMetadata(key.toString(), () => this.requestMetadata(`${KITSU_URL}/meta/${metaType}/${key}.json`)
.catch(() => this.requestMetadata(`${CINEMETA_URL}/meta/${metaType}/${key}.json`))
.catch(() => {
// try different type in case there was a mismatch
@@ -71,30 +79,28 @@ class MetadataService implements IMetadataService {
.catch((error) => {
throw new Error(`failed metadata query ${key} due: ${error.message}`);
}));
}
};

public async isEpisodeImdbId(imdbId: string | undefined): Promise<boolean> {
public isEpisodeImdbId = async (imdbId: string | undefined): Promise<boolean> => {
if (!imdbId) {
return false;
}
return axios.get(`https://www.imdb.com/title/${imdbId}/`, {timeout: 10000})
.then(response => !!(response.data && response.data.includes('video.episode')))
.catch(() => false);
}
};

public escapeTitle(title: string): string {
return title.toLowerCase()
.normalize('NFKD') // normalize non-ASCII characters
.replace(/[\u0300-\u036F]/g, '')
.replace(/&/g, 'and')
.replace(/[;, ~./]+/g, ' ') // replace dots, commas or underscores with spaces
.replace(/[^\w \-()×+#@!'\u0400-\u04ff]+/g, '') // remove all non-alphanumeric chars
.replace(/^\d{1,2}[.#\s]+(?=(?:\d+[.\s]*)?[\u0400-\u04ff])/i, '') // remove russian movie numbering
.replace(/\s{2,}/, ' ') // replace multiple spaces
.trim();
}
public escapeTitle = (title: string): string => title.toLowerCase()
.normalize('NFKD') // normalize non-ASCII characters
.replace(/[\u0300-\u036F]/g, '')
.replace(/&/g, 'and')
.replace(/[;, ~./]+/g, ' ') // replace dots, commas or underscores with spaces
.replace(/[^\w \-()×+#@!'\u0400-\u04ff]+/g, '') // remove all non-alphanumeric chars
.replace(/^\d{1,2}[.#\s]+(?=(?:\d+[.\s]*)?[\u0400-\u04ff])/i, '') // remove russian movie numbering
.replace(/\s{2,}/, ' ') // replace multiple spaces
.trim();

private async requestMetadata(url: string): Promise<IMetadataResponse> {
private requestMetadata = async (url: string): Promise<IMetadataResponse> => {
let response: AxiosResponse<any, any> = await axios.get(url, {timeout: TIMEOUT});
let result: IMetadataResponse;
const body = response.data;
@@ -107,80 +113,74 @@ class MetadataService implements IMetadataService {
}

return result;
}
};

private handleCinemetaResponse(body: ICinemetaJsonResponse): IMetadataResponse {
return {
imdbId: parseInt(body.meta.imdb_id),
type: body.meta.type,
title: body.meta.name,
year: parseInt(body.meta.year),
country: body.meta.country,
genres: body.meta.genres,
status: body.meta.status,
videos: body.meta.videos
? body.meta.videos.map(video => ({
name: video.name,
season: video.season,
episode: video.episode,
imdbSeason: video.season,
imdbEpisode: video.episode,
}))
: [],
episodeCount: body.meta.videos
? this.getEpisodeCount(body.meta.videos)
: [],
totalCount: body.meta.videos
? body.meta.videos.filter(
entry => entry.season !== 0 && entry.episode !== 0
).length
: 0,
};
}
private handleCinemetaResponse = (body: ICinemetaJsonResponse): IMetadataResponse => ({
imdbId: parseInt(body.meta.imdb_id),
type: body.meta.type,
title: body.meta.name,
year: parseInt(body.meta.year),
country: body.meta.country,
genres: body.meta.genres,
status: body.meta.status,
videos: body.meta.videos
? body.meta.videos.map(video => ({
name: video.name,
season: video.season,
episode: video.episode,
imdbSeason: video.season,
imdbEpisode: video.episode,
}))
: [],
episodeCount: body.meta.videos
? this.getEpisodeCount(body.meta.videos)
: [],
totalCount: body.meta.videos
? body.meta.videos.filter(
entry => entry.season !== 0 && entry.episode !== 0
).length
: 0,
});

private handleKitsuResponse(body: IKitsuJsonResponse): IMetadataResponse {
return {
kitsuId: parseInt(body.meta.kitsu_id),
type: body.meta.type,
title: body.meta.name,
year: parseInt(body.meta.year),
country: body.meta.country,
genres: body.meta.genres,
status: body.meta.status,
videos: body.meta.videos
? body.meta.videos.map(video => ({
name: video.title,
season: video.season,
episode: video.episode,
kitsuId: video.id,
kitsuEpisode: video.episode,
released: video.released,
}))
: [],
episodeCount: body.meta.videos
? this.getEpisodeCount(body.meta.videos)
: [],
totalCount: body.meta.videos
? body.meta.videos.filter(
entry => entry.season !== 0 && entry.episode !== 0
).length
: 0,
};
}
private handleKitsuResponse = (body: IKitsuJsonResponse): IMetadataResponse => ({
kitsuId: parseInt(body.meta.kitsu_id),
type: body.meta.type,
title: body.meta.name,
year: parseInt(body.meta.year),
country: body.meta.country,
genres: body.meta.genres,
status: body.meta.status,
videos: body.meta.videos
? body.meta.videos.map(video => ({
name: video.title,
season: video.season,
episode: video.episode,
kitsuId: video.id,
kitsuEpisode: video.episode,
released: video.released,
}))
: [],
episodeCount: body.meta.videos
? this.getEpisodeCount(body.meta.videos)
: [],
totalCount: body.meta.videos
? body.meta.videos.filter(
entry => entry.season !== 0 && entry.episode !== 0
).length
: 0,
});

private getEpisodeCount(videos: ICommonVideoMetadata[]) {
return Object.values(
videos
.filter(entry => entry.season !== 0 && entry.episode !== 0)
.sort((a, b) => a.season - b.season)
.reduce((map, next) => {
map[next.season] = map[next.season] + 1 || 1;
return map;
}, {})
);
}
private getEpisodeCount = (videos: ICommonVideoMetadata[]) => Object.values(
videos
.filter(entry => entry.season !== 0 && entry.episode !== 0)
.sort((a, b) => a.season - b.season)
.reduce((map, next) => {
map[next.season] = map[next.season] + 1 || 1;
return map;
}, {})
);

private getIMDbIdFromNameToImdb(name: string, info: IMetaDataQuery): Promise<string | Error> {
private getIMDbIdFromNameToImdb = (name: string, info: IMetaDataQuery): Promise<string | Error> => {
const year = info.year;
const type = info.type;
return new Promise((resolve, reject) => {
@@ -192,9 +192,9 @@ class MetadataService implements IMetadataService {
}
});
});
}
};

private async getIMDbIdFromGoogle(query: string): Promise<string | undefined> {
private getIMDbIdFromGoogle = async (query: string): Promise<string | undefined> => {
try {
const searchResults = await search({query: query});
for (const result of searchResults) {
@@ -211,8 +211,6 @@ class MetadataService implements IMetadataService {
} catch (error) {
throw new Error('Failed to find IMDb ID from Google search');
}
}
};
}

export const metadataService: MetadataService = new MetadataService();
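For reference, the getEpisodeCount() reduce above groups non-special episodes per season and then keeps only the counts. A worked example with assumed input:

// Input (illustrative): two season-1 episodes, one season-2 episode, one special.
const videos = [
    { season: 1, episode: 1 },
    { season: 1, episode: 2 },
    { season: 2, episode: 1 },
    { season: 0, episode: 1 }, // special - filtered out
];
// filter + sort + reduce produce { 1: 2, 2: 1 }; Object.values(...) yields [2, 1].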
@@ -9,6 +9,7 @@ import {ISubtitleAttributes} from "../../repository/interfaces/subtitle_attribut
import {IContentAttributes} from "../../repository/interfaces/content_attributes";
import {parse} from "parse-torrent-title";
import {ITorrentDownloadService} from "../interfaces/torrent_download_service";
import {injectable} from "inversify";

interface ITorrentFile {
name: string;
@@ -17,7 +18,8 @@ interface ITorrentFile {
fileIndex: number;
}

class TorrentDownloadService implements ITorrentDownloadService {
@injectable()
export class TorrentDownloadService implements ITorrentDownloadService {
private engineOptions: TorrentStream.TorrentEngineOptions = {
connections: configurationService.torrentConfig.MAX_CONNECTIONS_PER_TORRENT,
uploads: 0,
@@ -26,7 +28,7 @@ class TorrentDownloadService implements ITorrentDownloadService {
tracker: true,
};

public async getTorrentFiles(torrent: IParsedTorrent, timeout: number = 30000): Promise<ITorrentFileCollection> {
public getTorrentFiles = async (torrent: IParsedTorrent, timeout: number = 30000): Promise<ITorrentFileCollection> => {
const torrentFiles: ITorrentFile[] = await this.filesFromTorrentStream(torrent, timeout);

const videos = this.filterVideos(torrent, torrentFiles);
@@ -38,9 +40,9 @@ class TorrentDownloadService implements ITorrentDownloadService {
videos: videos,
subtitles: subtitles,
};
}
};

private async filesFromTorrentStream(torrent: IParsedTorrent, timeout: number): Promise<ITorrentFile[]> {
private filesFromTorrentStream = async (torrent: IParsedTorrent, timeout: number): Promise<ITorrentFile[]> => {
if (!torrent.infoHash) {
return Promise.reject(new Error("No infoHash..."));
}
@@ -72,9 +74,9 @@ class TorrentDownloadService implements ITorrentDownloadService {
clearTimeout(timeoutId);
});
});
}
};

private filterVideos(torrent: IParsedTorrent, torrentFiles: ITorrentFile[]): IFileAttributes[] {
private filterVideos = (torrent: IParsedTorrent, torrentFiles: ITorrentFile[]): IFileAttributes[] => {
if (torrentFiles.length === 1 && !Number.isInteger(torrentFiles[0].fileIndex)) {
return [this.mapTorrentFileToFileAttributes(torrent, torrentFiles[0])];
}
@@ -99,18 +101,14 @@ class TorrentDownloadService implements ITorrentDownloadService {
.filter(video => !isRedundant(video))
.filter(video => !isWatermark(video))
.map(video => this.mapTorrentFileToFileAttributes(torrent, video));
}
};

private filterSubtitles(torrent: IParsedTorrent, torrentFiles: ITorrentFile[]): ISubtitleAttributes[] {
return torrentFiles.filter(file => ExtensionHelpers.isSubtitle(file.name || ''))
.map(file => this.mapTorrentFileToSubtitleAttributes(torrent, file));
}
private filterSubtitles = (torrent: IParsedTorrent, torrentFiles: ITorrentFile[]): ISubtitleAttributes[] => torrentFiles.filter(file => ExtensionHelpers.isSubtitle(file.name || ''))
.map(file => this.mapTorrentFileToSubtitleAttributes(torrent, file));

private createContent(torrent: IParsedTorrent, torrentFiles: ITorrentFile[]): IContentAttributes[] {
return torrentFiles.map(file => this.mapTorrentFileToContentAttributes(torrent, file));
}
private createContent = (torrent: IParsedTorrent, torrentFiles: ITorrentFile[]): IContentAttributes[] => torrentFiles.map(file => this.mapTorrentFileToContentAttributes(torrent, file));

private mapTorrentFileToFileAttributes(torrent: IParsedTorrent, file: ITorrentFile): IFileAttributes {
private mapTorrentFileToFileAttributes = (torrent: IParsedTorrent, file: ITorrentFile): IFileAttributes => {
const videoFile: IFileAttributes = {
title: file.name,
size: file.length,
@@ -124,27 +122,21 @@ class TorrentDownloadService implements ITorrentDownloadService {
};

return {...videoFile, ...parse(file.name)};
}
};

private mapTorrentFileToSubtitleAttributes(torrent: IParsedTorrent, file: ITorrentFile): ISubtitleAttributes {
return {
title: file.name,
infoHash: torrent.infoHash,
fileIndex: file.fileIndex,
fileId: file.fileIndex,
path: file.path,
};
}
private mapTorrentFileToSubtitleAttributes = (torrent: IParsedTorrent, file: ITorrentFile): ISubtitleAttributes => ({
title: file.name,
infoHash: torrent.infoHash,
fileIndex: file.fileIndex,
fileId: file.fileIndex,
path: file.path,
});

private mapTorrentFileToContentAttributes(torrent: IParsedTorrent, file: ITorrentFile): IContentAttributes {
return {
infoHash: torrent.infoHash,
fileIndex: file.fileIndex,
path: file.path,
size: file.length,
};
}
private mapTorrentFileToContentAttributes = (torrent: IParsedTorrent, file: ITorrentFile): IContentAttributes => ({
infoHash: torrent.infoHash,
fileIndex: file.fileIndex,
path: file.path,
size: file.length,
});
}

export const torrentDownloadService = new TorrentDownloadService();
@@ -1,21 +1,42 @@
import {parse} from 'parse-torrent-title';
import {IParsedTorrent} from "../interfaces/parsed_torrent";
import {repository} from '../../repository/database_repository';
import {TorrentType} from '../enums/torrent_types';
import {ITorrentFileCollection} from "../interfaces/torrent_file_collection";
import {Torrent} from "../../repository/models/torrent";
import {PromiseHelpers} from '../helpers/promises_helpers';
import {logger} from './logging_service';
import {metadataService} from './metadata_service';
import {torrentFileService} from './torrent_file_service';
import {torrentSubtitleService} from './torrent_subtitle_service';
import {ITorrentAttributes} from "../../repository/interfaces/torrent_attributes";
import {File} from "../../repository/models/file";
import {Subtitle} from "../../repository/models/subtitle";
import {ITorrentEntriesService} from "../interfaces/torrent_entries_service";
import {inject, injectable} from "inversify";
import {IocTypes} from "../models/ioc_types";
import {IMetadataService} from "../interfaces/metadata_service";
import {ILoggingService} from "../interfaces/logging_service";
import {ITorrentFileService} from "../interfaces/torrent_file_service";
import {ITorrentSubtitleService} from "../interfaces/torrent_subtitle_service";
import {IDatabaseRepository} from "../../repository/interfaces/database_repository";

class TorrentEntriesService implements ITorrentEntriesService {
public async createTorrentEntry(torrent: IParsedTorrent, overwrite = false): Promise<void> {
@injectable()
export class TorrentEntriesService implements ITorrentEntriesService {
private metadataService: IMetadataService;
private logger: ILoggingService;
private fileService: ITorrentFileService;
private subtitleService: ITorrentSubtitleService;
private repository: IDatabaseRepository;

constructor(@inject(IocTypes.IMetadataService) metadataService: IMetadataService,
@inject(IocTypes.ILoggingService) logger: ILoggingService,
@inject(IocTypes.ITorrentFileService) fileService: ITorrentFileService,
@inject(IocTypes.ITorrentSubtitleService) torrentSubtitleService: ITorrentSubtitleService,
@inject(IocTypes.IDatabaseRepository) repository: IDatabaseRepository) {
this.metadataService = metadataService;
this.logger = logger;
this.fileService = fileService;
this.subtitleService = torrentSubtitleService;
this.repository = repository;
}

public createTorrentEntry = async (torrent: IParsedTorrent, overwrite = false): Promise<void> => {
const titleInfo = parse(torrent.title);

if (!torrent.imdbId && torrent.type !== TorrentType.Anime) {
@@ -24,7 +45,7 @@ class TorrentEntriesService implements ITorrentEntriesService {
year: titleInfo.year,
type: torrent.type
};
torrent.imdbId = await metadataService.getImdbId(imdbQuery)
torrent.imdbId = await this.metadataService.getImdbId(imdbQuery)
.catch(() => undefined);
}
if (torrent.imdbId && torrent.imdbId.toString().length < 9) {
@@ -41,25 +62,25 @@ class TorrentEntriesService implements ITorrentEntriesService {
year: titleInfo.year,
season: titleInfo.season,
};
torrent.kitsuId = await metadataService.getKitsuId(kitsuQuery)
torrent.kitsuId = await this.metadataService.getKitsuId(kitsuQuery)
.catch(() => undefined);
}

if (!torrent.imdbId && !torrent.kitsuId && !torrentFileService.isPackTorrent(torrent)) {
logger.warn(`imdbId or kitsuId not found: ${torrent.provider} ${torrent.title}`);
if (!torrent.imdbId && !torrent.kitsuId && !this.fileService.isPackTorrent(torrent)) {
this.logger.warn(`imdbId or kitsuId not found: ${torrent.provider} ${torrent.title}`);
return;
}

const fileCollection: ITorrentFileCollection = await torrentFileService.parseTorrentFiles(torrent)
const fileCollection: ITorrentFileCollection = await this.fileService.parseTorrentFiles(torrent)
.then((torrentContents: ITorrentFileCollection) => overwrite ? this.overwriteExistingFiles(torrent, torrentContents) : torrentContents)
.then((torrentContents: ITorrentFileCollection) => torrentSubtitleService.assignSubtitles(torrentContents))
.then((torrentContents: ITorrentFileCollection) =>this.subtitleService.assignSubtitles(torrentContents))
.catch(error => {
logger.warn(`Failed getting files for ${torrent.title}`, error.message);
this.logger.warn(`Failed getting files for ${torrent.title}`, error.message);
return {};
});

if (!fileCollection.videos || !fileCollection.videos.length) {
logger.warn(`no video files found for ${torrent.provider} [${torrent.infoHash}] ${torrent.title}`);
this.logger.warn(`no video files found for ${torrent.provider} [${torrent.infoHash}] ${torrent.title}`);
return;
}

@@ -69,31 +90,27 @@ class TorrentEntriesService implements ITorrentEntriesService {
subtitles: fileCollection.subtitles
});

return repository.createTorrent(newTorrent)
return this.repository.createTorrent(newTorrent)
.then(() => PromiseHelpers.sequence(fileCollection.videos.map(video => () => {
const newVideo = File.build(video);
return repository.createFile(newVideo)
return this.repository.createFile(newVideo)
})))
.then(() => logger.info(`Created ${torrent.provider} entry for [${torrent.infoHash}] ${torrent.title}`));
}
.then(() => this.logger.info(`Created ${torrent.provider} entry for [${torrent.infoHash}] ${torrent.title}`));
};

public async createSkipTorrentEntry(torrent: Torrent) {
return repository.createSkipTorrent(torrent);
}
public createSkipTorrentEntry = async (torrent: Torrent) => this.repository.createSkipTorrent(torrent);

public async getStoredTorrentEntry(torrent: Torrent) {
return repository.getSkipTorrent(torrent.infoHash)
.catch(() => repository.getTorrent(torrent))
.catch(() => undefined);
}
public getStoredTorrentEntry = async (torrent: Torrent) => this.repository.getSkipTorrent(torrent.infoHash)
.catch(() => this.repository.getTorrent(torrent))
.catch(() => undefined);

public async checkAndUpdateTorrent(torrent: IParsedTorrent): Promise<boolean> {
public checkAndUpdateTorrent = async (torrent: IParsedTorrent): Promise<boolean> => {
const query: ITorrentAttributes = {
infoHash: torrent.infoHash,
provider: torrent.provider,
}

const existingTorrent = await repository.getTorrent(query).catch(() => undefined);
const existingTorrent = await this.repository.getTorrent(query).catch(() => undefined);

if (!existingTorrent) {
return false;
@@ -110,18 +127,18 @@ class TorrentEntriesService implements ITorrentEntriesService {
if (!existingTorrent.languages && torrent.languages && existingTorrent.provider !== 'RARBG') {
existingTorrent.languages = torrent.languages;
await existingTorrent.save();
logger.debug(`Updated [${existingTorrent.infoHash}] ${existingTorrent.title} language to ${torrent.languages}`);
this.logger.debug(`Updated [${existingTorrent.infoHash}] ${existingTorrent.title} language to ${torrent.languages}`);
}
return this.createTorrentContents(existingTorrent)
.then(() => this.updateTorrentSeeders(existingTorrent));
}
};

public async createTorrentContents(torrent: Torrent) {
public createTorrentContents = async (torrent: Torrent) => {
if (torrent.opened) {
return;
}

const storedVideos: File[] = await repository.getFiles(torrent.infoHash).catch(() => []);
const storedVideos: File[] = await this.repository.getFiles(torrent.infoHash).catch(() => []);
if (!storedVideos || !storedVideos.length) {
return;
}
@@ -129,12 +146,12 @@ class TorrentEntriesService implements ITorrentEntriesService {
const imdbId: string | undefined = PromiseHelpers.mostCommonValue(storedVideos.map(stored => stored.imdbId));
const kitsuId: number | undefined = PromiseHelpers.mostCommonValue(storedVideos.map(stored => stored.kitsuId));

const fileCollection: ITorrentFileCollection = await torrentFileService.parseTorrentFiles(torrent)
const fileCollection: ITorrentFileCollection = await this.fileService.parseTorrentFiles(torrent)
.then(torrentContents => notOpenedVideo ? torrentContents : {...torrentContents, videos: storedVideos})
.then(torrentContents => torrentSubtitleService.assignSubtitles(torrentContents))
.then(torrentContents => this.subtitleService.assignSubtitles(torrentContents))
.then(torrentContents => this.assignMetaIds(torrentContents, imdbId, kitsuId))
.catch(error => {
logger.warn(`Failed getting contents for [${torrent.infoHash}] ${torrent.title}`, error.message);
this.logger.warn(`Failed getting contents for [${torrent.infoHash}] ${torrent.title}`, error.message);
return {};
});

@@ -161,35 +178,35 @@ class TorrentEntriesService implements ITorrentEntriesService {
subtitles: fileCollection.subtitles
});

return repository.createTorrent(newTorrent)
return this.repository.createTorrent(newTorrent)
.then(() => {
if (shouldDeleteOld) {
logger.debug(`Deleting old video for [${torrent.infoHash}] ${torrent.title}`)
this.logger.debug(`Deleting old video for [${torrent.infoHash}] ${torrent.title}`)
return storedVideos[0].destroy();
}
return Promise.resolve();
})
.then(() => PromiseHelpers.sequence(fileCollection.videos.map(video => () => {
const newVideo = File.build(video);
return repository.createFile(newVideo)
return this.repository.createFile(newVideo)
})))
.then(() => logger.info(`Created contents for ${torrent.provider} [${torrent.infoHash}] ${torrent.title}`))
.catch(error => logger.error(`Failed saving contents for [${torrent.infoHash}] ${torrent.title}`, error));
}
.then(() => this.logger.info(`Created contents for ${torrent.provider} [${torrent.infoHash}] ${torrent.title}`))
.catch(error => this.logger.error(`Failed saving contents for [${torrent.infoHash}] ${torrent.title}`, error));
};

public async updateTorrentSeeders(torrent: ITorrentAttributes) {
public updateTorrentSeeders = async (torrent: ITorrentAttributes) => {
if (!(torrent.infoHash || (torrent.provider && torrent.torrentId)) || !Number.isInteger(torrent.seeders)) {
return torrent;
}

return repository.setTorrentSeeders(torrent, torrent.seeders)
return this.repository.setTorrentSeeders(torrent, torrent.seeders)
.catch(error => {
logger.warn('Failed updating seeders:', error);
this.logger.warn('Failed updating seeders:', error);
return undefined;
});
}
};

private assignMetaIds(fileCollection: ITorrentFileCollection, imdbId: string, kitsuId: number): ITorrentFileCollection {
private assignMetaIds = (fileCollection: ITorrentFileCollection, imdbId: string, kitsuId: number): ITorrentFileCollection => {
if (fileCollection.videos && fileCollection.videos.length) {
fileCollection.videos.forEach(video => {
video.imdbId = imdbId;
@@ -198,12 +215,12 @@ class TorrentEntriesService implements ITorrentEntriesService {
}

return fileCollection;
}
};

private async overwriteExistingFiles(torrent: IParsedTorrent, torrentContents: ITorrentFileCollection) {
private overwriteExistingFiles = async (torrent: IParsedTorrent, torrentContents: ITorrentFileCollection) => {
const videos = torrentContents && torrentContents.videos;
if (videos && videos.length) {
const existingFiles = await repository.getFiles(torrent.infoHash)
const existingFiles = await this.repository.getFiles(torrent.infoHash)
.then((existing) => existing
.reduce((map, next) => {
const fileIndex = next.fileIndex !== undefined ? next.fileIndex : null;
@@ -228,7 +245,5 @@ class TorrentEntriesService implements ITorrentEntriesService {
return torrentContents;
}
return Promise.reject(`No video files found for: ${torrent.title}`);
}
}

export const torrentEntriesService = new TorrentEntriesService();
};
}
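createTorrentEntry() and createTorrentContents() both persist files through PromiseHelpers.sequence, which is not part of this diff; the sketch below shows the behaviour the call sites appear to rely on (an assumption, not the project's actual helper): promise factories executed one at a time instead of in parallel.

// Assumed shape of a sequence helper: run () => Promise<T> factories strictly in order.
const sequence = async <T>(factories: Array<() => Promise<T>>): Promise<T[]> => {
    const results: T[] = [];
    for (const factory of factories) {
        results.push(await factory()); // each factory starts only after the previous one settles
    }
    return results;
};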
@@ -5,9 +5,6 @@ import {PromiseHelpers} from '../helpers/promises_helpers';
|
||||
import {TorrentType} from '../enums/torrent_types';
|
||||
import {configurationService} from './configuration_service';
|
||||
import {ExtensionHelpers} from '../helpers/extension_helpers';
|
||||
import {metadataService} from './metadata_service';
|
||||
import {torrentDownloadService} from "./torrent_download_service";
|
||||
import {logger} from "./logging_service";
|
||||
import {IMetadataResponse} from "../interfaces/metadata_response";
|
||||
import {IMetaDataQuery} from "../interfaces/metadata_query";
|
||||
import {ICommonVideoMetadata} from "../interfaces/common_video_metadata";
|
||||
@@ -16,23 +13,41 @@ import {IParsedTorrent} from "../interfaces/parsed_torrent";
|
||||
import {IFileAttributes} from "../../repository/interfaces/file_attributes";
|
||||
import {IContentAttributes} from "../../repository/interfaces/content_attributes";
|
||||
import {ITorrentFileService} from "../interfaces/torrent_file_service";
|
||||
import {inject, injectable} from "inversify";
|
||||
import {IocTypes} from "../models/ioc_types";
|
||||
import {IMetadataService} from "../interfaces/metadata_service";
|
||||
import {ITorrentDownloadService} from "../interfaces/torrent_download_service";
|
||||
import {ILoggingService} from "../interfaces/logging_service";
|
||||
|
||||
const MIN_SIZE: number = 5 * 1024 * 1024; // 5 MB
|
||||
const MULTIPLE_FILES_SIZE = 4 * 1024 * 1024 * 1024; // 4 GB
|
||||
|
||||
class TorrentFileService implements ITorrentFileService {
|
||||
@injectable()
|
||||
export class TorrentFileService implements ITorrentFileService {
|
||||
private metadataService: IMetadataService;
|
||||
private torrentDownloadService: ITorrentDownloadService;
|
||||
private logger: ILoggingService;
|
||||
|
||||
constructor(@inject(IocTypes.IMetadataService) metadataService: IMetadataService,
|
||||
@inject(IocTypes.ITorrentDownloadService) torrentDownloadService: ITorrentDownloadService,
|
||||
@inject(IocTypes.ILoggingService) logger: ILoggingService) {
|
||||
this.metadataService = metadataService;
|
||||
this.torrentDownloadService = torrentDownloadService;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
private readonly imdb_limiter: Bottleneck = new Bottleneck({
|
||||
maxConcurrent: configurationService.metadataConfig.IMDB_CONCURRENT,
|
||||
minTime: configurationService.metadataConfig.IMDB_INTERVAL_MS
|
||||
});
|
||||
|
||||
public async parseTorrentFiles(torrent: IParsedTorrent): Promise<ITorrentFileCollection> {
|
||||
public parseTorrentFiles = async (torrent: IParsedTorrent): Promise<ITorrentFileCollection> => {
|
||||
const parsedTorrentName = parse(torrent.title);
|
||||
const query: IMetaDataQuery = {
|
||||
id: torrent.kitsuId || torrent.imdbId,
|
||||
type: torrent.type || TorrentType.Movie,
|
||||
};
|
||||
const metadata = await metadataService.getMetadata(query)
|
||||
const metadata = await this.metadataService.getMetadata(query)
|
||||
.then(meta => Object.assign({}, meta))
|
||||
.catch(() => undefined);
|
||||
|
||||
@@ -47,9 +62,9 @@ class TorrentFileService implements ITorrentFileService {
|
||||
}
|
||||
|
||||
return this.parseSeriesFiles(torrent, metadata)
|
||||
}
|
||||
};
|
||||
|
||||
public isPackTorrent(torrent: IParsedTorrent): boolean {
|
||||
public isPackTorrent = (torrent: IParsedTorrent): boolean => {
|
||||
if (torrent.isPack) {
|
||||
return true;
|
||||
}
|
||||
@@ -64,17 +79,17 @@ class TorrentFileService implements ITorrentFileService {
|
||||
(parsedInfo.seasons && !parsedInfo.episodes);
|
||||
const hasSingleEpisode = Number.isInteger(parsedInfo.episode) || (!parsedInfo.episodes && parsedInfo.date);
|
||||
return hasMultipleEpisodes && !hasSingleEpisode;
|
||||
}
|
||||
};
|
||||
|
||||
private parseSeriesVideos(torrent: IParsedTorrent, videos: IFileAttributes[]): IFileAttributes[] {
|
||||
private parseSeriesVideos = (torrent: IParsedTorrent, videos: IFileAttributes[]): IFileAttributes[] => {
|
||||
const parsedTorrentName = parse(torrent.title);
|
||||
const hasMovies = parsedTorrentName.complete || !!torrent.title.match(/movies?(?:\W|$)/i);
|
||||
const parsedVideos = videos.map(video => this.parseSeriesVideo(video));
|
||||
|
||||
return parsedVideos.map(video => ({ ...video, isMovie: this.isMovieVideo(torrent, video, parsedVideos, hasMovies) }));
|
||||
}
|
||||
};
|
||||
|
||||
private async parseMovieFiles(torrent: IParsedTorrent, metadata: IMetadataResponse): Promise<ITorrentFileCollection> {
|
||||
private parseMovieFiles = async (torrent: IParsedTorrent, metadata: IMetadataResponse): Promise<ITorrentFileCollection> => {
|
||||
const fileCollection: ITorrentFileCollection = await this.getMoviesTorrentContent(torrent);
|
||||
const filteredVideos = fileCollection.videos
|
||||
.filter(video => video.size > MIN_SIZE)
|
||||
@@ -102,9 +117,9 @@ class TorrentFileService implements ITorrentFileService {
|
||||
imdbId: video.imdbId,
|
||||
})));
|
||||
return {...fileCollection, videos: parsedVideos};
|
||||
}
|
||||
};
|
||||
|
||||
private async parseSeriesFiles(torrent: IParsedTorrent, metadata: IMetadataResponse): Promise<ITorrentFileCollection> {
|
||||
private parseSeriesFiles = async (torrent: IParsedTorrent, metadata: IMetadataResponse): Promise<ITorrentFileCollection> => {
|
||||
const fileCollection: ITorrentFileCollection = await this.getSeriesTorrentContent(torrent);
|
||||
const parsedVideos: IFileAttributes[] = await Promise.resolve(fileCollection.videos)
|
||||
.then(videos => videos.filter(video => videos.length === 1 || video.size > MIN_SIZE))
|
||||
@@ -118,10 +133,10 @@ class TorrentFileService implements ITorrentFileService {
|
||||
.reduce((a, b) => a.concat(b), [])
|
||||
.map(video => this.isFeaturette(video) ? this.clearInfoFields(video) : video));
|
||||
return {...torrent.fileCollection, videos: parsedVideos};
|
||||
}
|
||||
};
|
||||
|
||||
private async getMoviesTorrentContent(torrent: IParsedTorrent): Promise<ITorrentFileCollection> {
|
||||
const files = await torrentDownloadService.getTorrentFiles(torrent)
|
||||
private getMoviesTorrentContent = async (torrent: IParsedTorrent): Promise<ITorrentFileCollection> => {
|
||||
const files = await this.torrentDownloadService.getTorrentFiles(torrent, configurationService.torrentConfig.TIMEOUT)
|
||||
.catch(error => {
|
||||
if (!this.isPackTorrent(torrent)) {
|
||||
const entries = [{name: torrent.title, path: torrent.title, size: torrent.size, fileIndex: null}];
|
||||
@@ -134,23 +149,24 @@ class TorrentFileService implements ITorrentFileService {
|
||||
files.videos = [{name: torrent.title, path: torrent.title, size: torrent.size, fileIndex: null}];
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
private getDefaultFileEntries(torrent: IParsedTorrent): IFileAttributes[] {
return [{title: torrent.title, path: torrent.title, size: torrent.size, fileIndex: null}];
}
};

private async getSeriesTorrentContent(torrent: IParsedTorrent): Promise<ITorrentFileCollection> {
return torrentDownloadService.getTorrentFiles(torrent)
.catch(error => {
if (!this.isPackTorrent(torrent)) {
return { videos: this.getDefaultFileEntries(torrent), subtitles: [], contents: [] }
}
return Promise.reject(error);
});
}
private getDefaultFileEntries = (torrent: IParsedTorrent): IFileAttributes[] => [{
title: torrent.title,
path: torrent.title,
size: torrent.size,
fileIndex: null
}];

private async mapSeriesEpisode(torrent: IParsedTorrent, file: IFileAttributes, files: IFileAttributes[]) : Promise<IFileAttributes[]> {
private getSeriesTorrentContent = async (torrent: IParsedTorrent): Promise<ITorrentFileCollection> => this.torrentDownloadService.getTorrentFiles(torrent, configurationService.torrentConfig.TIMEOUT)
.catch(error => {
if (!this.isPackTorrent(torrent)) {
return {videos: this.getDefaultFileEntries(torrent), subtitles: [], contents: []}
}
return Promise.reject(error);
});

private mapSeriesEpisode = async (torrent: IParsedTorrent, file: IFileAttributes, files: IFileAttributes[]): Promise<IFileAttributes[]> => {
if (!file.episodes && !file.episodes) {
if (files.length === 1 || files.some(f => f.episodes || f.episodes) || parse(torrent.title).seasons) {
return Promise.resolve([{
@@ -178,13 +194,13 @@ class TorrentFileService implements ITorrentFileService {
episodes: file.episodes,
kitsuId: parseInt(file.kitsuId.toString() || torrent.kitsuId.toString()),
})))
}
};

private async mapSeriesMovie(torrent: IParsedTorrent, file: IFileAttributes): Promise<IFileAttributes[]> {
private mapSeriesMovie = async (torrent: IParsedTorrent, file: IFileAttributes): Promise<IFileAttributes[]> => {
const kitsuId= torrent.type === TorrentType.Anime ? await this.findMovieKitsuId(file)
.then(result => {
if (result instanceof Error) {
logger.warn(`Failed to retrieve kitsuId due to error: ${result.message}`);
this.logger.warn(`Failed to retrieve kitsuId due to error: ${result.message}`);
return undefined;
}
return result;
@@ -197,9 +213,9 @@ class TorrentFileService implements ITorrentFileService {
type: TorrentType.Movie
};

const metadataOrError = await metadataService.getMetadata(query);
const metadataOrError = await this.metadataService.getMetadata(query);
if (metadataOrError instanceof Error) {
logger.warn(`Failed to retrieve metadata due to error: ${metadataOrError.message}`);
this.logger.warn(`Failed to retrieve metadata due to error: ${metadataOrError.message}`);
// return default result or throw error, depending on your use case
return [{
infoHash: torrent.infoHash,
@@ -229,9 +245,9 @@ class TorrentFileService implements ITorrentFileService {
imdbEpisode: episodeVideo && metadata.imdbId | metadata.kitsuId ? episodeVideo.episode || episodeVideo.episode : undefined,
kitsuEpisode: episodeVideo && metadata.imdbId | metadata.kitsuId ? episodeVideo.episode || episodeVideo.episode : undefined,
}];
}
};

private async decomposeEpisodes(torrent: IParsedTorrent, files: IFileAttributes[], metadata: IMetadataResponse = { episodeCount: [] }) {
private decomposeEpisodes = async (torrent: IParsedTorrent, files: IFileAttributes[], metadata: IMetadataResponse = { episodeCount: [] }) => {
if (files.every(file => !file.episodes && !file.date)) {
return files;
}
@@ -274,9 +290,9 @@ class TorrentFileService implements ITorrentFileService {
// decomposeEpisodeTitleFiles(torrent, files, metadata);

return files;
}
};

private preprocessEpisodes(files: IFileAttributes[]) {
private preprocessEpisodes = (files: IFileAttributes[]) => {
// reverse special episode naming when they named with 0 episode, ie. S02E00
files
.filter(file => Number.isInteger(file.season) && file.episode === 0)
@@ -285,9 +301,9 @@ class TorrentFileService implements ITorrentFileService {
file.episodes = [file.season]
file.season = 0;
})
}
};

private isConcatSeasonAndEpisodeFiles(files: IFileAttributes[], sortedEpisodes: number[], metadata: IMetadataResponse) {
private isConcatSeasonAndEpisodeFiles = (files: IFileAttributes[], sortedEpisodes: number[], metadata: IMetadataResponse) => {
if (metadata.kitsuId !== undefined) {
// anime does not use this naming scheme in 99% of cases;
return false;
@@ -312,13 +328,11 @@ class TorrentFileService implements ITorrentFileService {
.filter(file => file.episodes.every(ep => ep > metadata.totalCount));
return sortedConcatEpisodes.length >= thresholdSorted && concatFileEpisodes.length >= threshold
|| concatAboveTotalEpisodeCount.length >= thresholdAbove;
}
};

private isDateEpisodeFiles(files: IFileAttributes[], metadata: IMetadataResponse) {
return files.every(file => (!file.season || !metadata.episodeCount[file.season - 1]) && file.date);
}
private isDateEpisodeFiles = (files: IFileAttributes[], metadata: IMetadataResponse) => files.every(file => (!file.season || !metadata.episodeCount[file.season - 1]) && file.date);

private isAbsoluteEpisodeFiles(torrent: IParsedTorrent, files: IFileAttributes[], metadata: IMetadataResponse) {
private isAbsoluteEpisodeFiles = (torrent: IParsedTorrent, files: IFileAttributes[], metadata: IMetadataResponse) => {
const threshold = Math.ceil(files.length / 5);
const isAnime = torrent.type === TorrentType.Anime && torrent.kitsuId;
const nonMovieEpisodes = files
@@ -329,9 +343,9 @@ class TorrentFileService implements ITorrentFileService {
return nonMovieEpisodes.every(file => !file.season)
|| (isAnime && nonMovieEpisodes.every(file => file.season > metadata.episodeCount.length))
|| absoluteEpisodes.length >= threshold;
}
};

private isNewEpisodeNotInMetadata(torrent: IParsedTorrent, video: IFileAttributes, metadata: IMetadataResponse) {
private isNewEpisodeNotInMetadata = (torrent: IParsedTorrent, video: IFileAttributes, metadata: IMetadataResponse) => {
// new episode might not yet been indexed by cinemeta.
// detect this if episode number is larger than the last episode or season is larger than the last one
// only for non anime metas
@@ -340,9 +354,9 @@ class TorrentFileService implements ITorrentFileService {
&& /continuing|current/i.test(metadata.status)
&& video.season >= metadata.episodeCount.length
&& video.episodes.every(ep => ep > (metadata.episodeCount[video.season - 1] || 0));
}
};

private decomposeConcatSeasonAndEpisodeFiles(files: IFileAttributes[], metadata: IMetadataResponse) {
private decomposeConcatSeasonAndEpisodeFiles = (files: IFileAttributes[], metadata: IMetadataResponse) => {
files
.filter(file => file.episodes && file.season !== 0 && file.episodes.every(ep => ep > 100))
.filter(file => metadata.episodeCount[(file.season || this.div100(file.episodes[0])) - 1] < 100)
@@ -352,9 +366,9 @@ class TorrentFileService implements ITorrentFileService {
file.episodes = file.episodes.map(ep => this.mod100(ep))
});

}
};

private decomposeAbsoluteEpisodeFiles(torrent: IParsedTorrent, videos: IFileAttributes[], metadata: IMetadataResponse) {
private decomposeAbsoluteEpisodeFiles = (torrent: IParsedTorrent, videos: IFileAttributes[], metadata: IMetadataResponse) => {
if (metadata.episodeCount.length === 0) {
videos
.filter(file => !Number.isInteger(file.season) && file.episodes && !file.isMovie)
@@ -376,9 +390,9 @@ class TorrentFileService implements ITorrentFileService {
file.episodes = file.episodes
.map(ep => ep - metadata.episodeCount.slice(0, seasonIdx).reduce((a, b) => a + b, 0))
});
}
};

private decomposeDateEpisodeFiles(files: IFileAttributes[], metadata: IMetadataResponse) {
private decomposeDateEpisodeFiles = (files: IFileAttributes[], metadata: IMetadataResponse) => {
if (!metadata || !metadata.videos || !metadata.videos.length) {
return;
}
@@ -400,9 +414,9 @@ class TorrentFileService implements ITorrentFileService {
file.episodes = [video.episode];
}
});
}
};

private getTimeZoneOffset(country: string | undefined) {
private getTimeZoneOffset = (country: string | undefined) => {
switch (country) {
case 'United States':
case 'USA':
@@ -410,9 +424,9 @@ class TorrentFileService implements ITorrentFileService {
default:
return '00:00';
}
}
};

private assignKitsuOrImdbEpisodes(torrent: IParsedTorrent, files: IFileAttributes[], metadata: IMetadataResponse) {
private assignKitsuOrImdbEpisodes = (torrent: IParsedTorrent, files: IFileAttributes[], metadata: IMetadataResponse) => {
if (!metadata || !metadata.videos || !metadata.videos.length) {
if (torrent.type === TorrentType.Anime) {
// assign episodes as kitsu episodes for anime when no metadata available for imdb mapping
@@ -493,9 +507,9 @@ class TorrentFileService implements ITorrentFileService {
});
}
return files;
}
};

private needsCinemetaMetadataForAnime(files: IFileAttributes[], metadata: IMetadataResponse) {
private needsCinemetaMetadataForAnime = (files: IFileAttributes[], metadata: IMetadataResponse) => {
if (!metadata || !metadata.imdbId || !metadata.videos || !metadata.videos.length) {
return false;
}
@@ -509,19 +523,19 @@ class TorrentFileService implements ITorrentFileService {
return differentSeasons > 1 || files
.filter(file => !file.isMovie && file.episodes)
.some(file => file.season < minSeason || file.season > maxSeason || file.episodes.every(ep => ep > total));
}
};

private async updateToCinemetaMetadata(metadata: IMetadataResponse) {
private updateToCinemetaMetadata = async (metadata: IMetadataResponse) => {
const query: IMetaDataQuery = {
id: metadata.imdbId,
type: metadata.type
};

return await metadataService.getMetadata(query)
return await this.metadataService.getMetadata(query)
.then((newMetadataOrError) => {
if (newMetadataOrError instanceof Error) {
// handle error
logger.warn(`Failed ${metadata.imdbId} metadata cinemeta update due: ${newMetadataOrError.message}`);
this.logger.warn(`Failed ${metadata.imdbId} metadata cinemeta update due: ${newMetadataOrError.message}`);
return metadata; // or throw newMetadataOrError to propagate error up the call stack
}
// At this point TypeScript infers newMetadataOrError to be of type MetadataResponse
@@ -535,11 +549,11 @@ class TorrentFileService implements ITorrentFileService {
return metadata;
}
})
}
};

private findMovieImdbId(title: IFileAttributes | string) {
private findMovieImdbId = (title: IFileAttributes | string) => {
const parsedTitle = typeof title === 'string' ? parse(title) : title;
logger.debug(`Finding movie imdbId for ${title}`);
this.logger.debug(`Finding movie imdbId for ${title}`);
return this.imdb_limiter.schedule(async () => {
const imdbQuery = {
title: parsedTitle.title,
@@ -547,14 +561,14 @@ class TorrentFileService implements ITorrentFileService {
type: TorrentType.Movie
};
try {
return await metadataService.getImdbId(imdbQuery);
return await this.metadataService.getImdbId(imdbQuery);
} catch (e) {
return undefined;
}
});
}
};

private async findMovieKitsuId(title: IFileAttributes | string) {
private findMovieKitsuId = async (title: IFileAttributes | string) => {
const parsedTitle = typeof title === 'string' ? parse(title) : title;
const kitsuQuery = {
title: parsedTitle.title,
@@ -563,28 +577,22 @@ class TorrentFileService implements ITorrentFileService {
type: TorrentType.Movie
};
try {
return await metadataService.getKitsuId(kitsuQuery);
return await this.metadataService.getKitsuId(kitsuQuery);
} catch (e) {
return undefined;
}
}
};

private isDiskTorrent(contents: IContentAttributes[]) {
return contents.some(content => ExtensionHelpers.isDisk(content.path));
}
private isDiskTorrent = (contents: IContentAttributes[]) => contents.some(content => ExtensionHelpers.isDisk(content.path));

private isSingleMovie(videos: IFileAttributes[]) {
return videos.length === 1 ||
(videos.length === 2 &&
videos.find(v => /\b(?:part|disc|cd)[ ._-]?0?1\b|^0?1\.\w{2,4}$/i.test(v.path)) &&
videos.find(v => /\b(?:part|disc|cd)[ ._-]?0?2\b|^0?2\.\w{2,4}$/i.test(v.path)));
}
private isSingleMovie = (videos: IFileAttributes[]) => videos.length === 1 ||
(videos.length === 2 &&
videos.find(v => /\b(?:part|disc|cd)[ ._-]?0?1\b|^0?1\.\w{2,4}$/i.test(v.path)) &&
videos.find(v => /\b(?:part|disc|cd)[ ._-]?0?2\b|^0?2\.\w{2,4}$/i.test(v.path)));

private isFeaturette(video: IFileAttributes) {
return /featurettes?\/|extras-grym/i.test(video.path);
}
private isFeaturette = (video: IFileAttributes) => /featurettes?\/|extras-grym/i.test(video.path);

private parseSeriesVideo(video: IFileAttributes): IFileAttributes {
private parseSeriesVideo = (video: IFileAttributes): IFileAttributes => {
const videoInfo = parse(video.title);
// the episode may be in a folder containing season number
if (!Number.isInteger(videoInfo.season) && video.path.includes('/')) {
@@ -628,9 +636,9 @@ class TorrentFileService implements ITorrentFileService {
}

return { ...video, ...videoInfo };
}
};

private isMovieVideo(torrent: IParsedTorrent, video: IFileAttributes, otherVideos: IFileAttributes[], hasMovies: boolean): boolean {
private isMovieVideo = (torrent: IParsedTorrent, video: IFileAttributes, otherVideos: IFileAttributes[], hasMovies: boolean): boolean => {
if (Number.isInteger(torrent.season) && Array.isArray(torrent.episodes)) {
// not movie if video has season
return false;
@@ -652,28 +660,22 @@ class TorrentFileService implements ITorrentFileService {
return !!torrent.year
&& otherVideos.length > 3
&& otherVideos.filter(other => other.title === video.title && other.year === video.year).length < 3;
}
};

private clearInfoFields(video: IFileAttributes) {
private clearInfoFields = (video: IFileAttributes) => {
video.imdbId = undefined;
video.imdbSeason = undefined;
video.imdbEpisode = undefined;
video.kitsuId = undefined;
video.kitsuEpisode = undefined;
return video;
}
};

private div100(episode: number) {
return (episode / 100 >> 0); // floor to nearest int
}
private div100 = (episode: number) => (episode / 100 >> 0);

private mod100(episode: number) {
return episode % 100;
}
private mod100 = (episode: number) => episode % 100;
}

export const torrentFileService = new TorrentFileService();

@@ -1,32 +1,46 @@
import {TorrentType} from "../enums/torrent_types";
import {logger} from "./logging_service";
import {trackerService} from "./tracker_service";
import {torrentEntriesService} from "./torrent_entries_service";
import {IIngestedTorrentAttributes} from "../../repository/interfaces/ingested_torrent_attributes";
import {IParsedTorrent} from "../interfaces/parsed_torrent";
import {ITorrentProcessingService} from "../interfaces/torrent_processing_service";
import {inject, injectable} from "inversify";
import {IocTypes} from "../models/ioc_types";
import {ITorrentEntriesService} from "../interfaces/torrent_entries_service";
import {ILoggingService} from "../interfaces/logging_service";
import {ITrackerService} from "../interfaces/tracker_service";

class TorrentProcessingService implements ITorrentProcessingService {
public async processTorrentRecord(torrent: IIngestedTorrentAttributes): Promise<void> {
@injectable()
export class TorrentProcessingService implements ITorrentProcessingService {
private torrentEntriesService: ITorrentEntriesService;
private logger: ILoggingService;
private trackerService: ITrackerService;
constructor(@inject(IocTypes.ITorrentEntriesService) torrentEntriesService: ITorrentEntriesService,
@inject(IocTypes.ILoggingService) logger: ILoggingService,
@inject(IocTypes.ITrackerService) trackerService: ITrackerService){
this.torrentEntriesService = torrentEntriesService;
this.logger = logger;
this.trackerService = trackerService;
}

public processTorrentRecord = async (torrent: IIngestedTorrentAttributes): Promise<void> => {
const {category} = torrent;
const type = category === 'tv' ? TorrentType.Series : TorrentType.Movie;
const torrentInfo: IParsedTorrent = await this.parseTorrent(torrent, type);

logger.info(`Processing torrent ${torrentInfo.title} with infoHash ${torrentInfo.infoHash}`);
this.logger.info(`Processing torrent ${torrentInfo.title} with infoHash ${torrentInfo.infoHash}`);

if (await torrentEntriesService.checkAndUpdateTorrent(torrentInfo)) {
if (await this.torrentEntriesService.checkAndUpdateTorrent(torrentInfo)) {
return;
}

return torrentEntriesService.createTorrentEntry(torrentInfo);
}
return this.torrentEntriesService.createTorrentEntry(torrentInfo, false);
};

private async assignTorrentTrackers(): Promise<string> {
const trackers = await trackerService.getTrackers();
private assignTorrentTrackers = async (): Promise<string> => {
const trackers = await this.trackerService.getTrackers();
return trackers.join(',');
}

private async parseTorrent(torrent: IIngestedTorrentAttributes, category: string): Promise<IParsedTorrent> {
private parseTorrent = async (torrent: IIngestedTorrentAttributes, category: string): Promise<IParsedTorrent> => {
const infoHash = torrent.info_hash?.trim().toLowerCase()
return {
title: torrent.name,
@@ -40,16 +54,14 @@ class TorrentProcessingService implements ITorrentProcessingService {
provider: torrent.source,
trackers: await this.assignTorrentTrackers(),
}
}
};

private parseImdbId(torrent: IIngestedTorrentAttributes): string | undefined {
private parseImdbId = (torrent: IIngestedTorrentAttributes): string | undefined => {
if (torrent.imdb === undefined || torrent.imdb === null) {
return undefined;
}

return torrent.imdb;
}
};
}

export const torrentProcessingService = new TorrentProcessingService();

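Note: the IocTypes tokens used with @inject above come from src/node/consumer/src/lib/models/ioc_types.ts, which is not part of this excerpt. A minimal sketch of what such a token map usually looks like with inversify (the member list below is an assumption, not copied from the commit):

// ioc_types.ts - sketch only; token names are assumed, not taken from this commit
export const IocTypes = {
    ILoggingService: Symbol.for("ILoggingService"),
    ICacheService: Symbol.for("ICacheService"),
    ITrackerService: Symbol.for("ITrackerService"),
    ITorrentEntriesService: Symbol.for("ITorrentEntriesService"),
    ITorrentProcessingService: Symbol.for("ITorrentProcessingService"),
    ICompositionalRoot: Symbol.for("ICompositionalRoot"),
};
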
@@ -2,9 +2,11 @@ import {parse} from 'parse-torrent-title';
import {ITorrentFileCollection} from "../interfaces/torrent_file_collection";
import {IFileAttributes} from "../../repository/interfaces/file_attributes";
import {ITorrentSubtitleService} from "../interfaces/torrent_subtitle_service";
import {injectable} from "inversify";

class TorrentSubtitleService implements ITorrentSubtitleService {
public assignSubtitles(fileCollection: ITorrentFileCollection): ITorrentFileCollection {
@injectable()
export class TorrentSubtitleService implements ITorrentSubtitleService {
public assignSubtitles = (fileCollection: ITorrentFileCollection): ITorrentFileCollection => {
if (fileCollection.videos && fileCollection.videos.length && fileCollection.subtitles && fileCollection.subtitles.length) {
if (fileCollection.videos.length === 1) {
fileCollection.videos[0].subtitles = fileCollection.subtitles;
@@ -24,9 +26,9 @@ class TorrentSubtitleService implements ITorrentSubtitleService {
return {...fileCollection, subtitles: unassignedSubs};
}
return fileCollection;
}
};

private parseVideo(video: IFileAttributes) {
private parseVideo = (video: IFileAttributes)=> {
const fileName = video.title.split('/').pop().replace(/\.(\w{2,4})$/, '');
const folderName = video.title.replace(/\/?[^/]+$/, '');
return {
@@ -37,7 +39,7 @@ class TorrentSubtitleService implements ITorrentSubtitleService {
};
}

private mostProbableSubtitleVideos(subtitle: any, parsedVideos: any[]) {
private mostProbableSubtitleVideos = (subtitle: any, parsedVideos: any[]) => {
const subTitle = (subtitle.title || subtitle.path).split('/').pop().replace(/\.(\w{2,4})$/, '');
const parsedSub = this.parsePath(subtitle.title || subtitle.path);
const byFileName = parsedVideos.filter(video => subTitle.includes(video.fileName));
@@ -66,17 +68,17 @@ class TorrentSubtitleService implements ITorrentSubtitleService {
return undefined;
}

private singleVideoFile(videos: any[]) {
private singleVideoFile = (videos: any[])=> {
return new Set(videos.map(v => v.videoFile.fileIndex)).size === 1;
}

private parsePath(path: string) {
private parsePath = (path: string) => {
const pathParts = path.split('/').map(part => this.parseFilename(part));
const parsedWithEpisode = pathParts.find(parsed => parsed.season && parsed.episodes);
return parsedWithEpisode || pathParts[pathParts.length - 1];
}

private parseFilename(filename: string) {
private parseFilename = (filename: string) => {
const parsedInfo = parse(filename)
const titleEpisode = parsedInfo.title.match(/(\d+)$/);
if (!parsedInfo.episodes && titleEpisode) {
@@ -85,10 +87,8 @@ class TorrentSubtitleService implements ITorrentSubtitleService {
return parsedInfo;
}

private arrayEquals(array1: any[], array2: any[]) {
private arrayEquals = (array1: any[], array2: any[]) => {
if (!array1 || !array2) return array1 === array2;
return array1.length === array2.length && array1.every((value, index) => value === array2[index])
}
}

export const torrentSubtitleService = new TorrentSubtitleService();

@@ -1,15 +1,25 @@
import axios, {AxiosResponse} from 'axios';
import {cacheService} from "./cache_service";
import {configurationService} from './configuration_service';
import {logger} from "./logging_service";
import {ITrackerService} from "../interfaces/tracker_service";
import {inject, injectable} from "inversify";
import {IocTypes} from "../models/ioc_types";
import {ICacheService} from "../interfaces/cache_service";
import {ILoggingService} from "../interfaces/logging_service";

class TrackerService implements ITrackerService {
public async getTrackers(): Promise<string[]> {
return cacheService.cacheTrackers(this.downloadTrackers);
};
@injectable()
export class TrackerService implements ITrackerService {
private cacheService: ICacheService;
private logger: ILoggingService;

constructor(@inject(IocTypes.ICacheService) cacheService: ICacheService,
@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.cacheService = cacheService;
this.logger = logger;
}

private async downloadTrackers(): Promise<string[]> {
public getTrackers = async (): Promise<string[]> => this.cacheService.cacheTrackers(this.downloadTrackers);

private downloadTrackers = async(): Promise<string[]> => {
const response: AxiosResponse<string> = await axios.get(configurationService.trackerConfig.TRACKERS_URL);
const trackersListText: string = response.data;
// Trackers are separated by a newline character
@@ -23,11 +33,9 @@ class TrackerService implements ITrackerService {

}

logger.info(`Trackers updated at ${Date.now()}: ${urlTrackers.length} trackers`);
this.logger.info(`Trackers updated at ${Date.now()}: ${urlTrackers.length} trackers`);

return urlTrackers;
};
}

export const trackerService = new TrackerService();

8
src/node/consumer/src/main.ts
Normal file
@@ -0,0 +1,8 @@
import {serviceContainer} from "./lib/models/inversify_config";
import {IocTypes} from "./lib/models/ioc_types";
import {ICompositionalRoot} from "./lib/interfaces/composition_root";

(async () => {
const compositionalRoot = serviceContainer.get<ICompositionalRoot>(IocTypes.ICompositionalRoot);
await compositionalRoot.start();
})();
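Note: serviceContainer and ICompositionalRoot come from ./lib/models/inversify_config and ./lib/interfaces/composition_root, neither of which appears in this excerpt. A minimal sketch of how such a container could be wired with inversify 6 (module paths, class names, and the binding list below are assumptions, not taken from this commit):

// inversify_config.ts - sketch only; paths, class names and bindings are assumed
import "reflect-metadata";
import {Container} from "inversify";
import {IocTypes} from "./ioc_types";
import {ILoggingService} from "../interfaces/logging_service";
import {LoggingService} from "../services/logging_service";
import {ITrackerService} from "../interfaces/tracker_service";
import {TrackerService} from "../services/tracker_service";

export const serviceContainer = new Container();
// each @injectable() implementation is registered once against its interface token,
// so constructor parameters decorated with @inject(IocTypes.X) can be resolved at runtime
serviceContainer.bind<ILoggingService>(IocTypes.ILoggingService).to(LoggingService).inSingletonScope();
serviceContainer.bind<ITrackerService>(IocTypes.ITrackerService).to(TrackerService).inSingletonScope();
// ...remaining services and the composition root would be bound the same way
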
@@ -13,9 +13,13 @@ import {SkipTorrent} from "./models/skipTorrent";
import {IFileAttributes} from "./interfaces/file_attributes";
import {ITorrentAttributes} from "./interfaces/torrent_attributes";
import {IngestedPage} from "./models/ingestedPage";
import {logger} from "../lib/services/logging_service";
import {ILoggingService} from "../lib/interfaces/logging_service";
import {IocTypes} from "../lib/models/ioc_types";
import {inject, injectable} from "inversify";
import {IDatabaseRepository} from "./interfaces/database_repository";

class DatabaseRepository {
@injectable()
export class DatabaseRepository implements IDatabaseRepository {
private readonly database: Sequelize;

private models = [
@@ -27,59 +31,56 @@ class DatabaseRepository {
SkipTorrent,
IngestedTorrent,
IngestedPage];
private logger: ILoggingService;

constructor() {
constructor(@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.logger = logger;
this.database = this.createDatabase();
}

public async connect() {

public connect = async () => {
try {
await this.database.sync({alter: configurationService.databaseConfig.AUTO_CREATE_AND_APPLY_MIGRATIONS});
} catch {
logger.error('Failed syncing database');
this.logger.error('Failed syncing database');
process.exit(1);
}
}
};

public async getProvider(provider: Provider) {
public getProvider = async (provider: Provider) => {
try {
const [result] = await Provider.findOrCreate({ where: { name: { [Op.eq]: provider.name } }, defaults: provider });
const [result] = await Provider.findOrCreate({where: {name: {[Op.eq]: provider.name}}, defaults: provider});
return result;
} catch {
return provider as Provider;
}
}
};

public async getTorrent(torrent: ITorrentAttributes): Promise<Torrent | null> {
public getTorrent = async (torrent: ITorrentAttributes): Promise<Torrent | null> => {
const where = torrent.infoHash
? { infoHash: torrent.infoHash }
: { provider: torrent.provider, torrentId: torrent.torrentId };
return await Torrent.findOne({ where });
}
? {infoHash: torrent.infoHash}
: {provider: torrent.provider, torrentId: torrent.torrentId};
return await Torrent.findOne({where});
};

public async getTorrentsBasedOnTitle(titleQuery: string, type: string): Promise<Torrent[]> {
return this.getTorrentsBasedOnQuery({ title: { [Op.regexp]: `${titleQuery}` }, type });
}
public getTorrentsBasedOnTitle = async (titleQuery: string, type: string): Promise<Torrent[]> => this.getTorrentsBasedOnQuery({
title: {[Op.regexp]: `${titleQuery}`},
type
});

public async getTorrentsBasedOnQuery(where: WhereOptions<ITorrentAttributes>): Promise<Torrent[]> {
return await Torrent.findAll({ where });
}
public getTorrentsBasedOnQuery = async (where: WhereOptions<ITorrentAttributes>): Promise<Torrent[]> => await Torrent.findAll({where});

public async getFilesBasedOnQuery(where: WhereOptions<IFileAttributes>): Promise<File[]> {
return await File.findAll({ where });
}
public getFilesBasedOnQuery = async (where: WhereOptions<IFileAttributes>): Promise<File[]> => await File.findAll({where});

public async getTorrentsWithoutSize(): Promise<Torrent[]> {
return await Torrent.findAll({
where: literal(
'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'),
order: [
['seeders', 'DESC']
]
});
}
public getTorrentsWithoutSize = async (): Promise<Torrent[]> => await Torrent.findAll({
where: literal(
'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'),
order: [
['seeders', 'DESC']
]
});

public async getUpdateSeedersTorrents(limit = 50): Promise<Torrent[]> {
public getUpdateSeedersTorrents = async (limit = 50): Promise<Torrent[]> => {
const until = moment().subtract(7, 'days').format('YYYY-MM-DD');
return await Torrent.findAll({
where: literal(`torrent."updatedAt" < '${until}'`),
@@ -89,9 +90,9 @@ class DatabaseRepository {
['updatedAt', 'ASC']
]
});
}
};

public async getUpdateSeedersNewTorrents(limit = 50): Promise<Torrent[]> {
public getUpdateSeedersNewTorrents = async (limit = 50): Promise<Torrent[]> => {
const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD');
const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD');
return await Torrent.findAll({
@@ -102,38 +103,34 @@ class DatabaseRepository {
['updatedAt', 'ASC']
]
});
}
};

public async getNoContentsTorrents(): Promise<Torrent[]> {
return await Torrent.findAll({
where: { opened: false, seeders: { [Op.gte]: 1 } },
limit: 500,
order: literal('random()')
});
}
public getNoContentsTorrents = async (): Promise<Torrent[]> => await Torrent.findAll({
where: {opened: false, seeders: {[Op.gte]: 1}},
limit: 500,
order: literal('random()')
});

public async createTorrent(torrent: Torrent): Promise<void> {
public createTorrent = async (torrent: Torrent): Promise<void> => {
await Torrent.upsert(torrent);
await this.createContents(torrent.infoHash, torrent.contents);
await this.createSubtitles(torrent.infoHash, torrent.subtitles);
}
};

public async setTorrentSeeders(torrent: ITorrentAttributes, seeders: number): Promise<[number]> {
public setTorrentSeeders = async (torrent: ITorrentAttributes, seeders: number): Promise<[number]> => {
const where = torrent.infoHash
? { infoHash: torrent.infoHash }
: { provider: torrent.provider, torrentId: torrent.torrentId };

? {infoHash: torrent.infoHash}
: {provider: torrent.provider, torrentId: torrent.torrentId};

return await Torrent.update(
{ seeders: seeders },
{ where: where }
{seeders: seeders},
{where: where}
);
}
};

public async deleteTorrent(infoHash: string): Promise<number> {
return await Torrent.destroy({ where: { infoHash: infoHash } });
}
public deleteTorrent = async (infoHash: string): Promise<number> => await Torrent.destroy({where: {infoHash: infoHash}});

public async createFile(file: File): Promise<void> {
public createFile = async (file: File): Promise<void> => {
if (file.id) {
if (file.dataValues) {
await file.save();
@@ -148,30 +145,24 @@ class DatabaseRepository {
return subtitle;
});
}
await File.create(file, { include: [Subtitle], ignoreDuplicates: true });
await File.create(file, {include: [Subtitle], ignoreDuplicates: true});
}
}
};

public async getFiles(infoHash: string): Promise<File[]> {
return File.findAll({ where: { infoHash: infoHash } });
}
public getFiles = async (infoHash: string): Promise<File[]> => File.findAll({where: {infoHash: infoHash}});

public async getFilesBasedOnTitle(titleQuery: string): Promise<File[]> {
return File.findAll({ where: { title: { [Op.regexp]: `${titleQuery}` } } });
}
public getFilesBasedOnTitle = async (titleQuery: string): Promise<File[]> => File.findAll({where: {title: {[Op.regexp]: `${titleQuery}`}}});

public async deleteFile(id: number): Promise<number> {
return File.destroy({ where: { id: id } });
}
public deleteFile = async (id: number): Promise<number> => File.destroy({where: {id: id}});

public async createSubtitles(infoHash: string, subtitles: Subtitle[]): Promise<void | Model<any, any>[]> {
public createSubtitles = async (infoHash: string, subtitles: Subtitle[]): Promise<void | Model<any, any>[]> => {
if (subtitles && subtitles.length) {
return Subtitle.bulkCreate(subtitles.map(subtitle => ({ infoHash, title: subtitle.path, ...subtitle })));
return Subtitle.bulkCreate(subtitles.map(subtitle => ({infoHash, title: subtitle.path, ...subtitle})));
}
return Promise.resolve();
}
};

public async upsertSubtitles(file: File, subtitles: Subtitle[]): Promise<void> {
public upsertSubtitles = async (file: File, subtitles: Subtitle[]): Promise<void> => {
if (file.id && subtitles && subtitles.length) {
await PromiseHelpers.sequence(subtitles
.map(subtitle => {
@@ -188,40 +179,32 @@ class DatabaseRepository {
}
}));
}
}
};

public async getSubtitles(infoHash: string): Promise<Subtitle[]> {
return Subtitle.findAll({ where: { infoHash: infoHash } });
}
public getSubtitles = async (infoHash: string): Promise<Subtitle[]> => Subtitle.findAll({where: {infoHash: infoHash}});

public async getUnassignedSubtitles(): Promise<Subtitle[]> {
return Subtitle.findAll({ where: { fileId: null } });
}
public getUnassignedSubtitles = async (): Promise<Subtitle[]> => Subtitle.findAll({where: {fileId: null}});

public async createContents(infoHash: string, contents: Content[]): Promise<void> {
public createContents = async (infoHash: string, contents: Content[]): Promise<void> => {
if (contents && contents.length) {
await Content.bulkCreate(contents.map(content => ({ infoHash, ...content })), { ignoreDuplicates: true });
await Torrent.update({ opened: true }, { where: { infoHash: infoHash }, silent: true });
await Content.bulkCreate(contents.map(content => ({infoHash, ...content})), {ignoreDuplicates: true});
await Torrent.update({opened: true}, {where: {infoHash: infoHash}, silent: true});
}
}
};

public async getContents(infoHash: string): Promise<Content[]> {
return Content.findAll({ where: { infoHash: infoHash } });
}
public getContents = async (infoHash: string): Promise<Content[]> => Content.findAll({where: {infoHash: infoHash}});

public async getSkipTorrent(infoHash: string): Promise<SkipTorrent> {
public getSkipTorrent = async (infoHash: string): Promise<SkipTorrent> => {
const result = await SkipTorrent.findByPk(infoHash);
if (!result) {
throw new Error(`torrent not found: ${infoHash}`);
}
return result.dataValues as SkipTorrent;
}
};

public async createSkipTorrent(torrent: Torrent): Promise<[SkipTorrent, boolean]> {
return SkipTorrent.upsert({ infoHash: torrent.infoHash });
}

private createDatabase(): Sequelize {
public createSkipTorrent = async (torrent: Torrent): Promise<[SkipTorrent, boolean]> => SkipTorrent.upsert({infoHash: torrent.infoHash});

private createDatabase = (): Sequelize => {
const newDatabase = new Sequelize(
configurationService.databaseConfig.POSTGRES_URI,
{
@@ -232,7 +215,5 @@ class DatabaseRepository {
newDatabase.addModels(this.models);

return newDatabase;
}
};
}

export const repository = new DatabaseRepository();

@@ -0,0 +1,62 @@
import {Provider} from "../models/provider";
import {WhereOptions} from "sequelize";
import {ITorrentAttributes} from "./torrent_attributes";
import {Torrent} from "../models/torrent";
import {IFileAttributes} from "./file_attributes";
import {File} from "../models/file";
import {Subtitle} from "../models/subtitle";
import {Model} from "sequelize-typescript";
import {Content} from "../models/content";
import {SkipTorrent} from "../models/skipTorrent";

export interface IDatabaseRepository {
connect(): Promise<void>;

getProvider(provider: Provider): Promise<Provider>;

getTorrent(torrent: ITorrentAttributes): Promise<Torrent | null>;

getTorrentsBasedOnTitle(titleQuery: string, type: string): Promise<Torrent[]>;

getTorrentsBasedOnQuery(where: WhereOptions<ITorrentAttributes>): Promise<Torrent[]>;

getFilesBasedOnQuery(where: WhereOptions<IFileAttributes>): Promise<File[]>;

getTorrentsWithoutSize(): Promise<Torrent[]>;

getUpdateSeedersTorrents(limit): Promise<Torrent[]>;

getUpdateSeedersNewTorrents(limit): Promise<Torrent[]>;

getNoContentsTorrents(): Promise<Torrent[]>;

createTorrent(torrent: Torrent): Promise<void>;

setTorrentSeeders(torrent: ITorrentAttributes, seeders: number): Promise<[number]>;

deleteTorrent(infoHash: string): Promise<number>;

createFile(file: File): Promise<void>;

getFiles(infoHash: string): Promise<File[]>;

getFilesBasedOnTitle(titleQuery: string): Promise<File[]>;

deleteFile(id: number): Promise<number>;

createSubtitles(infoHash: string, subtitles: Subtitle[]): Promise<void | Model<any, any>[]>;

upsertSubtitles(file: File, subtitles: Subtitle[]): Promise<void>;

getSubtitles(infoHash: string): Promise<Subtitle[]>;

getUnassignedSubtitles(): Promise<Subtitle[]>;

createContents(infoHash: string, contents: Content[]): Promise<void>;

getContents(infoHash: string): Promise<Content[]>;

getSkipTorrent(infoHash: string): Promise<SkipTorrent>;

createSkipTorrent(torrent: Torrent): Promise<[SkipTorrent, boolean]>;
}