mirror of https://github.com/knightcrawler-stremio/knightcrawler.git
synced 2024-12-20 03:29:51 +00:00

Comment resolution
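The diff below moves the consumer's Inversify wiring from constructor injection to @inject property injection, converts arrow-function class properties to plain methods, and relocates IocTypes, inversify_config and the compositional root under @setup. A minimal, self-contained sketch of the property-injection pattern the commit adopts (the logger interface, token and services here are simplified stand-ins, not the real KnightCrawler classes):

// Sketch only: assumes inversify and reflect-metadata are installed.
import "reflect-metadata";
import { Container, inject, injectable } from "inversify";

interface ILoggingService {
    info(message: string): void;
}

// Stand-in for the real IocTypes symbols under @setup/ioc_types.
const IocTypes = {
    ILoggingService: Symbol.for("ILoggingService"),
};

@injectable()
class ConsoleLogger implements ILoggingService {
    info(message: string): void {
        console.log(message);
    }
}

@injectable()
class TrackerService {
    // Property injection replaces the constructor parameter removed in the diff.
    // The `!` is a definite-assignment assertion for strict TypeScript settings.
    @inject(IocTypes.ILoggingService) private logger!: ILoggingService;

    getTrackers(): string[] {
        this.logger.info("fetching trackers");
        return [];
    }
}

const container = new Container();
container.bind<ILoggingService>(IocTypes.ILoggingService).to(ConsoleLogger);
container.bind<TrackerService>(TrackerService).toSelf();

// Resolving through the container populates the @inject-decorated properties;
// the updated tests obtain their instances the same way.
const trackerService = container.get(TrackerService);
trackerService.getTrackers();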
1 .gitignore vendored
@@ -404,4 +404,3 @@ FodyWeavers.xsd
*.sln.iml

dist/
**/localdev.compose.yml
@@ -1 +1,3 @@
dist/
esbuild.ts
jest.config.ts
@@ -1 +0,0 @@
v20.10.0
@@ -8,7 +8,7 @@ COPY package*.json ./
RUN npm install
COPY . .
RUN npm run build
RUN npm install --omit=dev
RUN npm prune --omit=dev

FROM node:lts-buster-slim
@@ -1,14 +1,22 @@
import {build} from "esbuild";
import {readFileSync, rmSync} from "fs";
import { build } from "esbuild";
import { readFileSync, rmSync } from "fs";

const {devDependencies} = JSON.parse(readFileSync("./package.json", "utf8"));
interface DevDependencies {
[key: string]: string;
}

interface PackageJson {
devDependencies?: DevDependencies;
}

const { devDependencies } = JSON.parse(readFileSync("./package.json", "utf8")) as PackageJson;

const start = Date.now();

try {
const outdir = "dist";

rmSync(outdir, {recursive: true, force: true});
rmSync(outdir, { recursive: true, force: true });

build({
bundle: true,
@@ -27,8 +35,8 @@ try {
plugins: [
{
name: "populate-import-meta",
setup: ({onLoad}) => {
onLoad({filter: new RegExp(`${import.meta.dirname}/src/.*.(js|ts)$`)}, args => {
setup: ({ onLoad }) => {
onLoad({ filter: new RegExp(`${import.meta.dirname}/src/.*.(js|ts)$`) }, args => {
const contents = readFileSync(args.path, "utf8");

const transformedContents = contents
@@ -36,19 +44,15 @@ try {
.replace(/import\.meta\.filename/g, "__filename")
.replace(/import\.meta\.dirname/g, "__dirname");

return {contents: transformedContents, loader: "default"};
return { contents: transformedContents, loader: "default" };
});
},
}
],
}).then(() => {
// biome-ignore lint/style/useTemplate: <explanation>
// eslint-disable-next-line no-undef
console.log("⚡ " + "\x1b[32m" + `Done in ${Date.now() - start}ms`);
});
} catch (e) {
// eslint-disable-next-line no-undef
console.log(e);
// eslint-disable-next-line no-undef
process.exit(1);
}
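For context, the populate-import-meta plugin kept above rewrites import.meta path helpers to their CommonJS counterparts before the bundle is emitted as dist/main.cjs. A standalone illustration of the same string rewrite, using a made-up input line rather than repo code:

// Hypothetical ESM source line, shown only to illustrate the plugin's replace step.
const sample = "const configPath = `${import.meta.dirname}/config.json`;";

const transformed = sample
    .replace(/import\.meta\.filename/g, "__filename")
    .replace(/import\.meta\.dirname/g, "__dirname");

// Prints: const configPath = `${__dirname}/config.json`;
console.log(transformed);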
@@ -1,11 +0,0 @@
const {pathsToModuleNameMapper} = require('ts-jest');
const {compilerOptions} = require('./tsconfig.json');

module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
moduleNameMapper: pathsToModuleNameMapper(compilerOptions.paths, {prefix: '<rootDir>/src/'}),
modulePaths: [
'<rootDir>'
],
};
14 src/node/consumer/jest.config.ts Normal file
@@ -0,0 +1,14 @@
import { pathsToModuleNameMapper } from 'ts-jest';
import { compilerOptions } from './tsconfig.json';

export default {
preset: 'ts-jest',
testEnvironment: 'node',
moduleNameMapper: pathsToModuleNameMapper(compilerOptions.paths, { prefix: '<rootDir>/src/' }),
modulePaths: [
'<rootDir>'
],
transform: {
'^.+\\.tsx?$': 'ts-jest',
},
};
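The new jest.config.ts keeps Jest's module resolution in sync with the tsconfig path aliases used throughout the codebase (e.g. @interfaces, @services, @setup) by deriving moduleNameMapper from compilerOptions.paths. A rough sketch of what pathsToModuleNameMapper produces, using a hypothetical paths entry rather than the project's actual tsconfig:

import { pathsToModuleNameMapper } from 'ts-jest';

// Hypothetical tsconfig-style paths entry, not copied from the repo.
const paths = { '@interfaces/*': ['interfaces/*'] };

// Yields roughly: { '^@interfaces/(.*)$': '<rootDir>/src/interfaces/$1' }
console.log(pathsToModuleNameMapper(paths, { prefix: '<rootDir>/src/' }));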
@@ -4,9 +4,9 @@
"type": "module",
"scripts": {
"clean": "rm -rf dist",
"build": "node esbuild.js",
"build": "tsx esbuild.ts",
"dev": "tsx watch --ignore node_modules src/main.ts | pino-pretty",
"start": "node --trace-deprecation dist/main.cjs",
"start": "node dist/main.cjs",
"lint": "eslint ./src --ext .ts,.js",
"lint-fix": "npm run lint -- --fix",
"test": "jest",

@@ -1,5 +1,5 @@
export const BooleanHelpers = {
parseBool: function (value: string | number | undefined, defaultValue: boolean): boolean {
parseBool: (value: string | number | undefined, defaultValue: boolean): boolean => {
switch (typeof value) {
case 'string':
return parseStringToBool(value, defaultValue);

@@ -1,3 +0,0 @@
export interface ICompositionalRoot {
start(): Promise<void>;
}
@@ -1,7 +0,0 @@
import {ICommonVideoMetadata} from "@interfaces/common_video_metadata";

export interface ISeasonEpisodeMap {
[season: number]: {
[episode: number]: ICommonVideoMetadata;
}
}
@@ -2,26 +2,21 @@
import {ILoggingService} from "@interfaces/logging_service";
import {IProcessTorrentsJob} from "@interfaces/process_torrents_job";
import {ITorrentProcessingService} from "@interfaces/torrent_processing_service";
import {IocTypes} from "@models/ioc_types";
import {IIngestedTorrentAttributes} from "@repository/interfaces/ingested_torrent_attributes";
import {configurationService} from '@services/configuration_service';
import {IocTypes} from "@setup/ioc_types";
import client, {Channel, Connection, ConsumeMessage, Options} from 'amqplib'
import {inject, injectable} from "inversify";

@injectable()
export class ProcessTorrentsJob implements IProcessTorrentsJob {
@inject(IocTypes.ITorrentProcessingService) torrentProcessingService: ITorrentProcessingService;
@inject(IocTypes.ILoggingService) logger: ILoggingService;

private readonly assertQueueOptions: Options.AssertQueue = {durable: true};
private readonly consumeQueueOptions: Options.Consume = {noAck: false};
private torrentProcessingService: ITorrentProcessingService;
private logger: ILoggingService;

constructor(@inject(IocTypes.ITorrentProcessingService) torrentProcessingService: ITorrentProcessingService,
@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.torrentProcessingService = torrentProcessingService;
this.logger = logger;
}

public listenToQueue = async (): Promise<void> => {
async listenToQueue(): Promise<void> {
if (!configurationService.jobConfig.JOBS_ENABLED) {
return;
}
@@ -1,27 +0,0 @@
import {ICompositionalRoot} from "@interfaces/composition_root";
import {IProcessTorrentsJob} from "@interfaces/process_torrents_job";
import {ITrackerService} from "@interfaces/tracker_service";
import {IocTypes} from "@models/ioc_types";
import {IDatabaseRepository} from "@repository/interfaces/database_repository";
import {inject, injectable} from "inversify";

@injectable()
export class CompositionalRoot implements ICompositionalRoot {
private trackerService: ITrackerService;
private databaseRepository: IDatabaseRepository;
private processTorrentsJob: IProcessTorrentsJob;

constructor(@inject(IocTypes.ITrackerService) trackerService: ITrackerService,
@inject(IocTypes.IDatabaseRepository) databaseRepository: IDatabaseRepository,
@inject(IocTypes.IProcessTorrentsJob) processTorrentsJob: IProcessTorrentsJob) {
this.trackerService = trackerService;
this.databaseRepository = databaseRepository;
this.processTorrentsJob = processTorrentsJob;
}

start = async (): Promise<void> => {
await this.trackerService.getTrackers();
await this.databaseRepository.connect();
await this.processTorrentsJob.listenToQueue();
};
}
@@ -1,6 +1,5 @@
import {PromiseHelpers} from '@helpers/promises_helpers';
import {ILoggingService} from "@interfaces/logging_service";
import {IocTypes} from "@models/ioc_types";
import {IContentCreationAttributes} from "@repository/interfaces/content_attributes";
import {IDatabaseRepository} from "@repository/interfaces/database_repository";
import {IFileAttributes, IFileCreationAttributes} from "@repository/interfaces/file_attributes";
@@ -15,6 +14,7 @@ import {SkipTorrent} from "@repository/models/skipTorrent";
import {Subtitle} from "@repository/models/subtitle";
import {Torrent} from "@repository/models/torrent";
import {configurationService} from '@services/configuration_service';
import {IocTypes} from "@setup/ioc_types";
import {inject, injectable} from "inversify";
import moment from 'moment';
import {literal, Op, WhereOptions} from "sequelize";
@@ -22,6 +22,8 @@ import {Model, Sequelize} from 'sequelize-typescript';

@injectable()
export class DatabaseRepository implements IDatabaseRepository {
@inject(IocTypes.ILoggingService) logger: ILoggingService;

private readonly database: Sequelize;

private models = [
@@ -34,14 +36,11 @@ export class DatabaseRepository implements IDatabaseRepository {
IngestedTorrent,
IngestedPage];

private logger: ILoggingService;

constructor(@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.logger = logger;
constructor() {
this.database = this.createDatabase();
}

public connect = async (): Promise<void> => {
async connect(): Promise<void> {
try {
await this.database.sync({alter: configurationService.databaseConfig.AUTO_CREATE_AND_APPLY_MIGRATIONS});
} catch (error) {
@@ -49,42 +48,50 @@ export class DatabaseRepository implements IDatabaseRepository {
this.logger.error('Failed syncing database');
process.exit(1);
}
};
}

public getProvider = async (provider: Provider): Promise<Provider> => {
async getProvider(provider: Provider): Promise<Provider> {
try {
const [result] = await Provider.findOrCreate({where: {name: {[Op.eq]: provider.name}}, defaults: provider});
return result;
} catch {
return provider as Provider;
}
};
}

public getTorrent = async (torrent: ITorrentAttributes): Promise<Torrent | null> => {
async getTorrent(torrent: ITorrentAttributes): Promise<Torrent | null> {
const where = torrent.infoHash
? {infoHash: torrent.infoHash}
: {provider: torrent.provider, torrentId: torrent.torrentId};
return await Torrent.findOne({where});
};
}

public getTorrentsBasedOnTitle = async (titleQuery: string, type: string): Promise<Torrent[]> => this.getTorrentsBasedOnQuery({
title: {[Op.regexp]: `${titleQuery}`},
type
});
async getTorrentsBasedOnTitle(titleQuery: string, type: string): Promise<Torrent[]> {
return this.getTorrentsBasedOnQuery({
title: {[Op.regexp]: `${titleQuery}`},
type
});
}

public getTorrentsBasedOnQuery = async (where: WhereOptions<ITorrentAttributes>): Promise<Torrent[]> => await Torrent.findAll({where});
async getTorrentsBasedOnQuery(where: WhereOptions<ITorrentAttributes>): Promise<Torrent[]> {
return await Torrent.findAll({where});
}

public getFilesBasedOnQuery = async (where: WhereOptions<IFileAttributes>): Promise<File[]> => await File.findAll({where});
async getFilesBasedOnQuery(where: WhereOptions<IFileAttributes>): Promise<File[]> {
return await File.findAll({where});
}

public getTorrentsWithoutSize = async (): Promise<Torrent[]> => await Torrent.findAll({
where: literal(
'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'),
order: [
['seeders', 'DESC']
]
});
async getTorrentsWithoutSize(): Promise<Torrent[]> {
return await Torrent.findAll({
where: literal(
'exists (select 1 from files where files."infoHash" = torrent."infoHash" and files.size = 300000000)'),
order: [
['seeders', 'DESC']
]
});
}

public getUpdateSeedersTorrents = async (limit = 50): Promise<Torrent[]> => {
async getUpdateSeedersTorrents(limit = 50): Promise<Torrent[]> {
const until = moment().subtract(7, 'days').format('YYYY-MM-DD');
return await Torrent.findAll({
where: literal(`torrent."updatedAt" < '${until}'`),
@@ -94,9 +101,9 @@ export class DatabaseRepository implements IDatabaseRepository {
['updatedAt', 'ASC']
]
});
};
}
public getUpdateSeedersNewTorrents = async (limit = 50): Promise<Torrent[]> => {
async getUpdateSeedersNewTorrents(limit = 50): Promise<Torrent[]> {
const lastUpdate = moment().subtract(12, 'hours').format('YYYY-MM-DD');
const createdAfter = moment().subtract(4, 'days').format('YYYY-MM-DD');
return await Torrent.findAll({
@@ -107,15 +114,17 @@ export class DatabaseRepository implements IDatabaseRepository {
['updatedAt', 'ASC']
]
});
};
}

public getNoContentsTorrents = async (): Promise<Torrent[]> => await Torrent.findAll({
where: {opened: false, seeders: {[Op.gte]: 1}},
limit: 500,
order: literal('random()')
});
async getNoContentsTorrents(): Promise<Torrent[]> {
return await Torrent.findAll({
where: {opened: false, seeders: {[Op.gte]: 1}},
limit: 500,
order: literal('random()')
});
}

public createTorrent = async (torrent: ITorrentCreationAttributes): Promise<void> => {
async createTorrent(torrent: ITorrentCreationAttributes): Promise<void> {
try {
await Torrent.upsert(torrent);
await this.createContents(torrent.infoHash, torrent.contents);
@@ -124,9 +133,9 @@ export class DatabaseRepository implements IDatabaseRepository {
this.logger.error(`Failed to create torrent: ${torrent.infoHash}`);
this.logger.debug("Error: ", error);
}
};
}

public setTorrentSeeders = async (torrent: ITorrentAttributes, seeders: number): Promise<[number]> => {
async setTorrentSeeders(torrent: ITorrentAttributes, seeders: number): Promise<[number]> {
const where = torrent.infoHash
? {infoHash: torrent.infoHash}
: {provider: torrent.provider, torrentId: torrent.torrentId};
@@ -135,11 +144,13 @@ export class DatabaseRepository implements IDatabaseRepository {
{seeders: seeders},
{where: where}
);
};
}

public deleteTorrent = async (infoHash: string): Promise<number> => await Torrent.destroy({where: {infoHash: infoHash}});
async deleteTorrent(infoHash: string): Promise<number> {
return await Torrent.destroy({where: {infoHash: infoHash}});
}

public createFile = async (file: IFileCreationAttributes): Promise<void> => {
async createFile(file: IFileCreationAttributes): Promise<void> {
try {
const operatingFile = File.build(file);
if (operatingFile.id) {
@@ -162,22 +173,28 @@ export class DatabaseRepository implements IDatabaseRepository {
this.logger.error(`Failed to create file: ${file.infoHash}`);
this.logger.debug("Error: ", error);
}
};
}

public getFiles = async (infoHash: string): Promise<File[]> => File.findAll({where: {infoHash: infoHash}});
async getFiles(infoHash: string): Promise<File[]> {
return File.findAll({where: {infoHash: infoHash}});
}

public getFilesBasedOnTitle = async (titleQuery: string): Promise<File[]> => File.findAll({where: {title: {[Op.regexp]: `${titleQuery}`}}});
async getFilesBasedOnTitle(titleQuery: string): Promise<File[]> {
return File.findAll({where: {title: {[Op.regexp]: `${titleQuery}`}}});
}

public deleteFile = async (id: number): Promise<number> => File.destroy({where: {id: id}});
async deleteFile(id: number): Promise<number> {
return File.destroy({where: {id: id}});
}

public createSubtitles = async (infoHash: string, subtitles: ISubtitleCreationAttributes[] | undefined): Promise<void | Model<ISubtitleAttributes, ISubtitleCreationAttributes>[]> => {
async createSubtitles(infoHash: string, subtitles: ISubtitleCreationAttributes[] | undefined): Promise<void | Model<ISubtitleAttributes, ISubtitleCreationAttributes>[]> {
if (subtitles && subtitles.length) {
return Subtitle.bulkCreate(subtitles.map(subtitle => ({...subtitle, infoHash: infoHash, title: subtitle.path})));
}
return Promise.resolve();
};
}

public upsertSubtitles = async (file: File, subtitles: ISubtitleCreationAttributes[] | undefined): Promise<void> => {
async upsertSubtitles(file: File, subtitles: ISubtitleCreationAttributes[] | undefined): Promise<void> {
if (file.id && subtitles && subtitles.length) {
await PromiseHelpers.sequence(subtitles
.map(subtitle => {
@@ -195,30 +212,38 @@ export class DatabaseRepository implements IDatabaseRepository {
}
}));
}
};
}

public getSubtitles = async (infoHash: string): Promise<Subtitle[]> => Subtitle.findAll({where: {infoHash: infoHash}});
async getSubtitles(infoHash: string): Promise<Subtitle[]> {
return Subtitle.findAll({where: {infoHash: infoHash}});
}

public getUnassignedSubtitles = async (): Promise<Subtitle[]> => Subtitle.findAll({where: {fileId: null}});
async getUnassignedSubtitles(): Promise<Subtitle[]> {
return Subtitle.findAll({where: {fileId: null}});
}

public createContents = async (infoHash: string, contents: IContentCreationAttributes[] | undefined): Promise<void> => {
async createContents(infoHash: string, contents: IContentCreationAttributes[] | undefined): Promise<void> {
if (contents && contents.length) {
await Content.bulkCreate(contents.map(content => ({...content, infoHash})), {ignoreDuplicates: true});
await Torrent.update({opened: true}, {where: {infoHash: infoHash}, silent: true});
}
};
}

public getContents = async (infoHash: string): Promise<Content[]> => Content.findAll({where: {infoHash: infoHash}});
async getContents(infoHash: string): Promise<Content[]> {
return Content.findAll({where: {infoHash: infoHash}});
}

public getSkipTorrent = async (infoHash: string): Promise<SkipTorrent> => {
async getSkipTorrent(infoHash: string): Promise<SkipTorrent> {
const result = await SkipTorrent.findByPk(infoHash);
if (!result) {
throw new Error(`torrent not found: ${infoHash}`);
}
return result.dataValues as SkipTorrent;
};
}

public createSkipTorrent = async (torrent: ITorrentCreationAttributes): Promise<[SkipTorrent, boolean | null]> => SkipTorrent.upsert({infoHash: torrent.infoHash});
async createSkipTorrent(torrent: ITorrentCreationAttributes): Promise<[SkipTorrent, boolean | null]> {
return SkipTorrent.upsert({infoHash: torrent.infoHash});
}

private createDatabase = (): Sequelize => {
const newDatabase = new Sequelize(
@@ -1,7 +1,6 @@
import {ISkipTorrentAttributes, ISkipTorrentCreationAttributes} from "@repository/interfaces/skip_torrent_attributes";
import {Column, DataType, Model, Table} from 'sequelize-typescript';

@Table({modelName: 'skip_torrent', timestamps: false})
export class SkipTorrent extends Model<ISkipTorrentAttributes, ISkipTorrentCreationAttributes> {
@@ -2,8 +2,8 @@ import {CacheType} from "@enums/cache_types";
import {ICacheOptions} from "@interfaces/cache_options";
import {ICacheService} from "@interfaces/cache_service";
import {ILoggingService} from "@interfaces/logging_service";
import {IocTypes} from "@models/ioc_types";
import {configurationService} from '@services/configuration_service';
import {IocTypes} from "@setup/ioc_types";
import {mongoDbStore} from '@tirke/node-cache-manager-mongodb'
import {Cache, createCache, MemoryCache, memoryStore} from 'cache-manager';
import {inject, injectable} from "inversify";
@@ -23,12 +23,12 @@ export type CacheMethod = () => any;

@injectable()
export class CacheService implements ICacheService {
private logger: ILoggingService;
@inject(IocTypes.ILoggingService) private logger: ILoggingService;

private readonly memoryCache: MemoryCache | undefined;
private readonly remoteCache: Cache | MemoryCache | undefined;

constructor(@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.logger = logger;
constructor() {
if (configurationService.cacheConfig.NO_CACHE) {
this.logger.info('Cache is disabled');
return;
@@ -38,17 +38,21 @@ export class CacheService implements ICacheService {
this.remoteCache = this.initiateRemoteCache();
}

public cacheWrapImdbId = (key: string, method: CacheMethod): Promise<CacheMethod> =>
this.cacheWrap(CacheType.MongoDb, `${IMDB_ID_PREFIX}:${key}`, method, {ttl: GLOBAL_TTL});
cacheWrapImdbId(key: string, method: CacheMethod): Promise<CacheMethod> {
return this.cacheWrap(CacheType.MongoDb, `${IMDB_ID_PREFIX}:${key}`, method, {ttl: GLOBAL_TTL});
}

public cacheWrapKitsuId = (key: string, method: CacheMethod): Promise<CacheMethod> =>
this.cacheWrap(CacheType.MongoDb, `${KITSU_ID_PREFIX}:${key}`, method, {ttl: GLOBAL_TTL});
cacheWrapKitsuId(key: string, method: CacheMethod): Promise<CacheMethod> {
return this.cacheWrap(CacheType.MongoDb, `${KITSU_ID_PREFIX}:${key}`, method, {ttl: GLOBAL_TTL});
}

public cacheWrapMetadata = (id: string, method: CacheMethod): Promise<CacheMethod> =>
this.cacheWrap(CacheType.Memory, `${METADATA_PREFIX}:${id}`, method, {ttl: MEMORY_TTL});
cacheWrapMetadata(id: string, method: CacheMethod): Promise<CacheMethod> {
return this.cacheWrap(CacheType.Memory, `${METADATA_PREFIX}:${id}`, method, {ttl: MEMORY_TTL});
}

public cacheTrackers = (method: CacheMethod): Promise<CacheMethod> =>
this.cacheWrap(CacheType.Memory, `${TRACKERS_KEY_PREFIX}`, method, {ttl: TRACKERS_TTL});
cacheTrackers(method: CacheMethod): Promise<CacheMethod> {
return this.cacheWrap(CacheType.Memory, `${TRACKERS_KEY_PREFIX}`, method, {ttl: TRACKERS_TTL});
}

private initiateMemoryCache = (): MemoryCache =>
createCache(memoryStore(), {
@@ -91,8 +95,7 @@ export class CacheService implements ICacheService {
}
}

private cacheWrap = async (
cacheType: CacheType, key: string, method: CacheMethod, options: ICacheOptions): Promise<CacheMethod> => {
private cacheWrap = async (cacheType: CacheType, key: string, method: CacheMethod, options: ICacheOptions): Promise<CacheMethod> => {
const cache = this.getCacheType(cacheType);

if (configurationService.cacheConfig.NO_CACHE || !cache) {
@@ -104,6 +107,5 @@ export class CacheService implements ICacheService {
this.logger.debug(`Cache options: ${JSON.stringify(options)}`);

return cache.wrap(key, method, options.ttl);
}
};
}
@@ -29,5 +29,4 @@ export class LoggingService implements ILoggingService {
this.logger.warn(message, args);
};
}

/* eslint-enable @typescript-eslint/no-explicit-any */
@@ -7,7 +7,7 @@ import {IKitsuJsonResponse} from "@interfaces/kitsu_metadata";
import {IMetaDataQuery} from "@interfaces/metadata_query";
import {IMetadataResponse} from "@interfaces/metadata_response";
import {IMetadataService} from "@interfaces/metadata_service";
import {IocTypes} from "@models/ioc_types";
import {IocTypes} from "@setup/ioc_types";
import axios from 'axios';
import {ResultTypes, search} from 'google-sr';
import {inject, injectable} from "inversify";
@@ -19,13 +19,9 @@ const TIMEOUT = 60000;

@injectable()
export class MetadataService implements IMetadataService {
private cacheService: ICacheService;
@inject(IocTypes.ICacheService) private cacheService: ICacheService;

constructor(@inject(IocTypes.ICacheService) cacheService: ICacheService) {
this.cacheService = cacheService;
}

public getKitsuId = async (info: IMetaDataQuery): Promise<number | Error> => {
async getKitsuId(info: IMetaDataQuery): Promise<number | Error> {
const title = this.escapeTitle(info.title!.replace(/\s\|\s.*/, ''));
const year = info.year ? ` ${info.year}` : '';
const season = info.season || 0 > 1 ? ` S${info.season}` : '';
@@ -42,9 +38,9 @@ export class MetadataService implements IMetadataService {
throw new Error('No search results');
}
}));
};
}

public getImdbId = async (info: IMetaDataQuery): Promise<string | undefined> => {
async getImdbId(info: IMetaDataQuery): Promise<string | undefined> {
const name = this.escapeTitle(info.title!);
const year = info.year || (info.date && info.date.slice(0, 4));
const key = `${name}_${year || 'NA'}_${info.type}`;
@@ -61,9 +57,9 @@ export class MetadataService implements IMetadataService {
const imdbIdFallback = await this.getIMDbIdFromGoogle(googleQuery);
return imdbIdFallback && 'tt' + imdbIdFallback.toString().replace(/tt0*([1-9][0-9]*)$/, '$1').padStart(7, '0');
}
};
}
public getMetadata = (query: IMetaDataQuery): Promise<IMetadataResponse | Error> => {
async getMetadata(query: IMetaDataQuery): Promise<IMetadataResponse | Error> {
if (!query.id) {
return Promise.reject("no valid id provided");
}
@@ -72,24 +68,27 @@ export class MetadataService implements IMetadataService {
const metaType = query.type === TorrentType.Movie ? TorrentType.Movie : TorrentType.Series;
const isImdbId = Boolean(key.toString().match(/^tt\d+$/));

return this.cacheService.cacheWrapMetadata(key.toString(), () => {
switch (isImdbId) {
case true:
return this.requestMetadata(`${CINEMETA_URL}/meta/imdb/${key}.json`, this.handleCinemetaResponse);
default:
return this.requestMetadata(`${KITSU_URL}/meta/${metaType}/${key}.json`, this.handleKitsuResponse)
}})
.catch(() => {
try {
try {
return await this.cacheService.cacheWrapMetadata(key.toString(), () => {
switch (isImdbId) {
case true:
return this.requestMetadata(`${CINEMETA_URL}/meta/imdb/${key}.json`, this.handleCinemetaResponse);
default:
return this.requestMetadata(`${KITSU_URL}/meta/${metaType}/${key}.json`, this.handleKitsuResponse)
}
});
} catch (e) {
// try different type in case there was a mismatch
const otherType = metaType === TorrentType.Movie ? TorrentType.Series : TorrentType.Movie;
return this.requestMetadata(`${CINEMETA_URL}/meta/${otherType}/${key}.json`, this.handleCinemetaResponse)
})
.catch((error) => {
throw new Error(`failed metadata query ${key} due: ${error.message}`);
});
};
}
} catch (error) {
throw new Error(`failed metadata query ${key} due: ${error.message}`);
}
}

public isEpisodeImdbId = async (imdbId: string | undefined): Promise<boolean> => {
async isEpisodeImdbId(imdbId: string | undefined): Promise<boolean> {
if (!imdbId || !imdbId.toString().match(/^tt\d+$/)) {
return false;
}
@@ -100,21 +99,23 @@ export class MetadataService implements IMetadataService {
} catch (error) {
return false;
}
};
}

public escapeTitle = (title: string): string => title.toLowerCase()
.normalize('NFKD') // normalize non-ASCII characters
.replace(/[\u0300-\u036F]/g, '')
.replace(/&/g, 'and')
.replace(/[;, ~./]+/g, ' ') // replace dots, commas or underscores with spaces
.replace(/[^\w \-()×+#@!'\u0400-\u04ff]+/g, '') // remove all non-alphanumeric chars
.replace(/^\d{1,2}[.#\s]+(?=(?:\d+[.\s]*)?[\u0400-\u04ff])/i, '') // remove russian movie numbering
.replace(/\s{2,}/, ' ') // replace multiple spaces
.trim();
escapeTitle(title: string): string {
return title.toLowerCase()
.normalize('NFKD') // normalize non-ASCII characters
.replace(/[\u0300-\u036F]/g, '')
.replace(/&/g, 'and')
.replace(/[;, ~./]+/g, ' ') // replace dots, commas or underscores with spaces
.replace(/[^\w \-()×+#@!'\u0400-\u04ff]+/g, '') // remove all non-alphanumeric chars
.replace(/^\d{1,2}[.#\s]+(?=(?:\d+[.\s]*)?[\u0400-\u04ff])/i, '') // remove russian movie numbering
.replace(/\s{2,}/, ' ') // replace multiple spaces
.trim();
}

private requestMetadata = async (url: string, handler: (body: unknown) => IMetadataResponse): Promise<IMetadataResponse> => {
try {
const response = await axios.get(url, { timeout: TIMEOUT });
const response = await axios.get(url, {timeout: TIMEOUT});
const body = response.data;
return handler(body);
} catch (error) {
@@ -231,4 +232,3 @@ export class MetadataService implements IMetadataService {
}
};
}
@@ -3,11 +3,11 @@ import {ILoggingService} from "@interfaces/logging_service";
import {IParsedTorrent} from "@interfaces/parsed_torrent";
import {ITorrentDownloadService} from "@interfaces/torrent_download_service";
import {ITorrentFileCollection} from "@interfaces/torrent_file_collection";
import {IocTypes} from "@models/ioc_types";
import {IContentAttributes} from "@repository/interfaces/content_attributes";
import {IFileAttributes} from "@repository/interfaces/file_attributes";
import {ISubtitleAttributes} from "@repository/interfaces/subtitle_attributes";
import {configurationService} from '@services/configuration_service';
import {IocTypes} from "@setup/ioc_types";
import {inject, injectable} from "inversify";
import {encode} from 'magnet-uri';
import {parse} from "parse-torrent-title";
@@ -25,7 +25,8 @@ interface ITorrentFile {

@injectable()
export class TorrentDownloadService implements ITorrentDownloadService {
private logger: ILoggingService;
@inject(IocTypes.ILoggingService) private logger: ILoggingService;

private engineOptions: TorrentEngineOptions = {
connections: configurationService.torrentConfig.MAX_CONNECTIONS_PER_TORRENT,
uploads: 0,
@@ -34,11 +35,7 @@ export class TorrentDownloadService implements ITorrentDownloadService {
tracker: true,
};

constructor(@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.logger = logger;
}

public getTorrentFiles = async (torrent: IParsedTorrent, timeout: number = 30000): Promise<ITorrentFileCollection> => {
async getTorrentFiles(torrent: IParsedTorrent, timeout: number = 30000): Promise<ITorrentFileCollection> {
const torrentFiles: ITorrentFile[] = await this.filesFromTorrentStream(torrent, timeout);

const videos = this.filterVideos(torrent, torrentFiles);
@@ -50,7 +47,7 @@ export class TorrentDownloadService implements ITorrentDownloadService {
videos: videos,
subtitles: subtitles,
};
};
}

private filesFromTorrentStream = async (torrent: IParsedTorrent, timeout: number): Promise<ITorrentFile[]> => {
if (!torrent.infoHash) {
@@ -165,4 +162,3 @@ export class TorrentDownloadService implements ITorrentDownloadService {
size: file.length,
});
}
@@ -8,7 +8,6 @@ import {ITorrentEntriesService} from "@interfaces/torrent_entries_service";
import {ITorrentFileCollection} from "@interfaces/torrent_file_collection";
import {ITorrentFileService} from "@interfaces/torrent_file_service";
import {ITorrentSubtitleService} from "@interfaces/torrent_subtitle_service";
import {IocTypes} from "@models/ioc_types";
import {IDatabaseRepository} from "@repository/interfaces/database_repository";
import {IFileCreationAttributes} from "@repository/interfaces/file_attributes";
import {ISubtitleAttributes} from "@repository/interfaces/subtitle_attributes";
@@ -17,30 +16,19 @@ import {File} from "@repository/models/file";
import {SkipTorrent} from "@repository/models/skipTorrent";
import {Subtitle} from "@repository/models/subtitle";
import {Torrent} from "@repository/models/torrent";
import {IocTypes} from "@setup/ioc_types";
import {inject, injectable} from "inversify";
import {parse} from 'parse-torrent-title';

@injectable()
export class TorrentEntriesService implements ITorrentEntriesService {
private metadataService: IMetadataService;
private logger: ILoggingService;
private fileService: ITorrentFileService;
private subtitleService: ITorrentSubtitleService;
private repository: IDatabaseRepository;
@inject(IocTypes.IMetadataService) private metadataService: IMetadataService;
@inject(IocTypes.ILoggingService) private logger: ILoggingService;
@inject(IocTypes.ITorrentFileService) private fileService: ITorrentFileService;
@inject(IocTypes.ITorrentSubtitleService) private subtitleService: ITorrentSubtitleService;
@inject(IocTypes.IDatabaseRepository) private repository: IDatabaseRepository;

constructor(@inject(IocTypes.IMetadataService) metadataService: IMetadataService,
@inject(IocTypes.ILoggingService) logger: ILoggingService,
@inject(IocTypes.ITorrentFileService) fileService: ITorrentFileService,
@inject(IocTypes.ITorrentSubtitleService) torrentSubtitleService: ITorrentSubtitleService,
@inject(IocTypes.IDatabaseRepository) repository: IDatabaseRepository) {
this.metadataService = metadataService;
this.logger = logger;
this.fileService = fileService;
this.subtitleService = torrentSubtitleService;
this.repository = repository;
}

public createTorrentEntry = async (torrent: IParsedTorrent, overwrite = false): Promise<void> => {
async createTorrentEntry(torrent: IParsedTorrent, overwrite = false): Promise<void> {
if (!torrent.title) {
this.logger.warn(`No title found for ${torrent.provider} [${torrent.infoHash}]`);
return;
@@ -108,15 +96,19 @@ export class TorrentEntriesService implements ITorrentEntriesService {
return this.repository.createFile(newVideo)
})))
.then(() => this.logger.info(`Created ${torrent.provider} entry for [${torrent.infoHash}] ${torrent.title}`));
};
}

public createSkipTorrentEntry: (torrent: ITorrentCreationAttributes) => Promise<[SkipTorrent, boolean | null]> = async (torrent: ITorrentCreationAttributes) => this.repository.createSkipTorrent(torrent);
async createSkipTorrentEntry(torrent: ITorrentCreationAttributes): Promise<[SkipTorrent, boolean | null]> {
return this.repository.createSkipTorrent(torrent);
}

public getStoredTorrentEntry = async (torrent: Torrent): Promise<Torrent | SkipTorrent | null | undefined> => this.repository.getSkipTorrent(torrent.infoHash)
.catch(() => this.repository.getTorrent(torrent.dataValues))
.catch(() => undefined);
async getStoredTorrentEntry(torrent: Torrent): Promise<Torrent | SkipTorrent | null | undefined> {
return this.repository.getSkipTorrent(torrent.infoHash)
.catch(() => this.repository.getTorrent(torrent.dataValues))
.catch(() => undefined);
}

public checkAndUpdateTorrent = async (torrent: IParsedTorrent): Promise<boolean> => {
async checkAndUpdateTorrent(torrent: IParsedTorrent): Promise<boolean> {
const query: ITorrentAttributes = {
infoHash: torrent.infoHash,
provider: torrent.provider,
@@ -146,9 +138,9 @@ export class TorrentEntriesService implements ITorrentEntriesService {
.then(() => this.updateTorrentSeeders(existingTorrent.dataValues))
.then(() => Promise.resolve(true))
.catch(() => Promise.reject(false));
};
}

public createTorrentContents = async (torrent: Torrent): Promise<void> => {
async createTorrentContents(torrent: Torrent): Promise<void> {
if (torrent.opened) {
return;
}
@@ -213,9 +205,9 @@ export class TorrentEntriesService implements ITorrentEntriesService {
})
.then(() => this.logger.info(`Created contents for ${torrent.provider} [${torrent.infoHash}] ${torrent.title}`))
.catch(error => this.logger.error(`Failed saving contents for [${torrent.infoHash}] ${torrent.title}`, error));
};
}

public updateTorrentSeeders = async (torrent: ITorrentAttributes): Promise<[number]> => {
async updateTorrentSeeders(torrent: ITorrentAttributes): Promise<[number]> {
if (!(torrent.infoHash || (torrent.provider && torrent.torrentId)) || !Number.isInteger(torrent.seeders)) {
return [0];
}
@@ -231,7 +223,7 @@ export class TorrentEntriesService implements ITorrentEntriesService {
this.logger.warn('Failed updating seeders:', error);
return [0];
});
};
}

private assignKitsuId = async (kitsuQuery: IMetaDataQuery, torrent: IParsedTorrent): Promise<void> => {
await this.metadataService.getKitsuId(kitsuQuery)
@@ -7,14 +7,13 @@ import {IMetaDataQuery} from "@interfaces/metadata_query";
import {IMetadataResponse} from "@interfaces/metadata_response";
import {IMetadataService} from "@interfaces/metadata_service";
import {IParsedTorrent} from "@interfaces/parsed_torrent";
import {ISeasonEpisodeMap} from "@interfaces/season_episode_map";
import {ITorrentDownloadService} from "@interfaces/torrent_download_service";
import {ITorrentFileCollection} from "@interfaces/torrent_file_collection";
import {ITorrentFileService} from "@interfaces/torrent_file_service";
import {IocTypes} from "@models/ioc_types";
import {IContentAttributes} from "@repository/interfaces/content_attributes";
import {IFileAttributes} from "@repository/interfaces/file_attributes";
import {configurationService} from '@services/configuration_service';
import {IocTypes} from "@setup/ioc_types";
import Bottleneck from 'bottleneck';
import {inject, injectable} from "inversify";
import moment from 'moment';
@@ -23,25 +22,20 @@ import {parse} from 'parse-torrent-title';
const MIN_SIZE: number = 5 * 1024 * 1024; // 5 MB
const MULTIPLE_FILES_SIZE = 4 * 1024 * 1024 * 1024; // 4 GB

type SeasonEpisodeMap = Record<number, Record<number, ICommonVideoMetadata>>;

@injectable()
export class TorrentFileService implements ITorrentFileService {
private metadataService: IMetadataService;
private torrentDownloadService: ITorrentDownloadService;
private logger: ILoggingService;
@inject(IocTypes.IMetadataService) metadataService: IMetadataService;
@inject(IocTypes.ITorrentDownloadService) torrentDownloadService: ITorrentDownloadService;
@inject(IocTypes.ILoggingService) logger: ILoggingService;

private readonly imdb_limiter: Bottleneck = new Bottleneck({
maxConcurrent: configurationService.metadataConfig.IMDB_CONCURRENT,
minTime: configurationService.metadataConfig.IMDB_INTERVAL_MS
});

constructor(@inject(IocTypes.IMetadataService) metadataService: IMetadataService,
@inject(IocTypes.ITorrentDownloadService) torrentDownloadService: ITorrentDownloadService,
@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.metadataService = metadataService;
this.torrentDownloadService = torrentDownloadService;
this.logger = logger;
}

public parseTorrentFiles = async (torrent: IParsedTorrent): Promise<ITorrentFileCollection> => {
async parseTorrentFiles(torrent: IParsedTorrent): Promise<ITorrentFileCollection> {
if (!torrent.title) {
return Promise.reject(new Error('Torrent title is missing'));
}
@@ -74,9 +68,9 @@ export class TorrentFileService implements ITorrentFileService {
}

return this.parseSeriesFiles(torrent, metadata)
};
}

public isPackTorrent = (torrent: IParsedTorrent): boolean => {
isPackTorrent(torrent: IParsedTorrent): boolean {
if (torrent.isPack) {
return true;
}
@@ -96,7 +90,7 @@ export class TorrentFileService implements ITorrentFileService {
const hasSingleEpisode: boolean = Boolean(Number.isInteger(parsedInfo.episode) || (!parsedInfo.episodes && parsedInfo.date));

return hasMultipleEpisodes && !hasSingleEpisode;
};
}

private parseSeriesVideos = (torrent: IParsedTorrent, videos: IFileAttributes[]): IFileAttributes[] => {
const parsedTorrentName = parse(torrent.title!);
@@ -482,7 +476,7 @@ export class TorrentFileService implements ITorrentFileService {

const seriesMapping = metadata.videos
.filter(video => video.season !== undefined && Number.isInteger(video.season) && video.episode !== undefined && Number.isInteger(video.episode))
.reduce<ISeasonEpisodeMap>((map, video) => {
.reduce<SeasonEpisodeMap>((map, video) => {
if (video.season !== undefined && video.episode !== undefined) {
const episodeMap = map[video.season] || {};
episodeMap[video.episode] = video;
@@ -737,25 +731,3 @@ export class TorrentFileService implements ITorrentFileService {

private mod100 = (episode: number): number => episode % 100;
}
@@ -4,25 +4,17 @@ import {IParsedTorrent} from "@interfaces/parsed_torrent";
import {ITorrentEntriesService} from "@interfaces/torrent_entries_service";
import {ITorrentProcessingService} from "@interfaces/torrent_processing_service";
import {ITrackerService} from "@interfaces/tracker_service";
import {IocTypes} from "@models/ioc_types";
import {IIngestedTorrentAttributes} from "@repository/interfaces/ingested_torrent_attributes";
import {IocTypes} from "@setup/ioc_types";
import {inject, injectable} from "inversify";

@injectable()
export class TorrentProcessingService implements ITorrentProcessingService {
private torrentEntriesService: ITorrentEntriesService;
private logger: ILoggingService;
private trackerService: ITrackerService;
@inject(IocTypes.ITorrentEntriesService) torrentEntriesService: ITorrentEntriesService;
@inject(IocTypes.ILoggingService) logger: ILoggingService;
@inject(IocTypes.ITrackerService) trackerService: ITrackerService;

constructor(@inject(IocTypes.ITorrentEntriesService) torrentEntriesService: ITorrentEntriesService,
@inject(IocTypes.ILoggingService) logger: ILoggingService,
@inject(IocTypes.ITrackerService) trackerService: ITrackerService) {
this.torrentEntriesService = torrentEntriesService;
this.logger = logger;
this.trackerService = trackerService;
}

public processTorrentRecord = async (torrent: IIngestedTorrentAttributes): Promise<void> => {
async processTorrentRecord(torrent: IIngestedTorrentAttributes): Promise<void> {
const {category} = torrent;
const type = category === 'tv' ? TorrentType.Series : TorrentType.Movie;
const torrentInfo: IParsedTorrent = await this.parseTorrent(torrent, type);
@@ -34,7 +26,7 @@ export class TorrentProcessingService implements ITorrentProcessingService {
}

return this.torrentEntriesService.createTorrentEntry(torrentInfo, false);
};
}

private assignTorrentTrackers = async (): Promise<string> => {
const trackers = await this.trackerService.getTrackers();
@@ -65,4 +57,3 @@ export class TorrentProcessingService implements ITorrentProcessingService {
return torrent.imdb;
};
}
@@ -7,7 +7,7 @@ import {parse} from 'parse-torrent-title';

@injectable()
export class TorrentSubtitleService implements ITorrentSubtitleService {
public assignSubtitles = (fileCollection: ITorrentFileCollection): ITorrentFileCollection => {
assignSubtitles(fileCollection: ITorrentFileCollection): ITorrentFileCollection {
if (fileCollection.videos && fileCollection.videos.length && fileCollection.subtitles && fileCollection.subtitles.length) {
if (fileCollection.videos.length === 1) {
const matchingSubtitles = fileCollection.subtitles.filter(subtitle =>
@@ -33,7 +33,7 @@ export class TorrentSubtitleService implements ITorrentSubtitleService {
return {...fileCollection, subtitles: unassignedSubs};
}
return fileCollection;
};
}

private parseVideo = (video: IFileAttributes): IFileAttributes => {
const fileName = video.title?.split('/')?.pop()?.replace(/\.(\w{2,4})$/, '') || '';
@@ -1,24 +1,20 @@
import {ICacheService} from "@interfaces/cache_service";
import {ILoggingService} from "@interfaces/logging_service";
import {ITrackerService} from "@interfaces/tracker_service";
import {IocTypes} from "@models/ioc_types";
import {configurationService} from '@services/configuration_service';
import {IocTypes} from "@setup/ioc_types";
import axios, {AxiosResponse} from 'axios';
import {inject, injectable} from "inversify";

@injectable()
export class TrackerService implements ITrackerService {
private cacheService: ICacheService;
private logger: ILoggingService;
@inject(IocTypes.ICacheService) cacheService: ICacheService;
@inject(IocTypes.ILoggingService) logger: ILoggingService;

constructor(@inject(IocTypes.ICacheService) cacheService: ICacheService,
@inject(IocTypes.ILoggingService) logger: ILoggingService) {
this.cacheService = cacheService;
this.logger = logger;
async getTrackers(): Promise<string[]> {
return this.cacheService.cacheTrackers(this.downloadTrackers);
}

public getTrackers = async (): Promise<string[]> => this.cacheService.cacheTrackers(this.downloadTrackers);

private downloadTrackers = async (): Promise<string[]> => {
const response: AxiosResponse<string> = await axios.get(configurationService.trackerConfig.TRACKERS_URL);
const trackersListText: string = response.data;
@@ -38,4 +34,3 @@ export class TrackerService implements ITrackerService {
return urlTrackers;
};
}
@@ -1,7 +1,7 @@
import "reflect-metadata"; // required
import {ICompositionalRoot} from "@interfaces/composition_root";
import {serviceContainer} from "@models/inversify_config";
import {IocTypes} from "@models/ioc_types";
import {ICompositionalRoot} from "@setup/composition_root";
import {serviceContainer} from "@setup/inversify_config";
import {IocTypes} from "@setup/ioc_types";

(async (): Promise<void> => {
const compositionalRoot = serviceContainer.get<ICompositionalRoot>(IocTypes.ICompositionalRoot);
22 src/node/consumer/src/setup/composition_root.ts Normal file
@@ -0,0 +1,22 @@
import {IProcessTorrentsJob} from "@interfaces/process_torrents_job";
import {ITrackerService} from "@interfaces/tracker_service";
import {IDatabaseRepository} from "@repository/interfaces/database_repository";
import {IocTypes} from "@setup/ioc_types";
import {inject, injectable} from "inversify";

export interface ICompositionalRoot {
start(): Promise<void>;
}

@injectable()
export class CompositionalRoot implements ICompositionalRoot {
@inject(IocTypes.ITrackerService) trackerService: ITrackerService;
@inject(IocTypes.IDatabaseRepository) databaseRepository: IDatabaseRepository;
@inject(IocTypes.IProcessTorrentsJob) processTorrentsJob: IProcessTorrentsJob;

async start(): Promise<void> {
await this.trackerService.getTrackers();
await this.databaseRepository.connect();
await this.processTorrentsJob.listenToQueue();
}
}
@@ -1,5 +1,4 @@
import {ICacheService} from "@interfaces/cache_service";
import {ICompositionalRoot} from "@interfaces/composition_root";
import {ILoggingService} from "@interfaces/logging_service";
import {IMetadataService} from "@interfaces/metadata_service";
import {IProcessTorrentsJob} from "@interfaces/process_torrents_job";
@@ -10,8 +9,6 @@ import {ITorrentProcessingService} from "@interfaces/torrent_processing_service"
import {ITorrentSubtitleService} from "@interfaces/torrent_subtitle_service";
import {ITrackerService} from "@interfaces/tracker_service";
import {ProcessTorrentsJob} from "@jobs/process_torrents_job";
import {CompositionalRoot} from "@models/composition_root";
import {IocTypes} from "@models/ioc_types";
import {DatabaseRepository} from "@repository/database_repository";
import {IDatabaseRepository} from "@repository/interfaces/database_repository";
import {CacheService} from "@services/cache_service";
@@ -23,6 +20,8 @@ import {TorrentFileService} from "@services/torrent_file_service";
import {TorrentProcessingService} from "@services/torrent_processing_service";
import {TorrentSubtitleService} from "@services/torrent_subtitle_service";
import {TrackerService} from "@services/tracker_service";
import {ICompositionalRoot, CompositionalRoot} from "@setup/composition_root";
import {IocTypes} from "@setup/ioc_types";
import {Container} from "inversify";

const serviceContainer = new Container();
@@ -1,6 +1,8 @@
import "reflect-metadata"; // required
import {ILoggingService} from '@interfaces/logging_service';
import {CacheMethod, CacheService} from '@services/cache_service';
import {IocTypes} from "@setup/ioc_types";
import {Container} from "inversify";

jest.mock('@services/configuration_service', () => {
return {
@@ -49,9 +51,12 @@ describe('CacheService Tests', () => {
beforeEach(() => {
jest.clearAllMocks();
process.env.LOG_LEVEL = 'debug';
loggingService = jest.requireMock<ILoggingService>('@services/logging_service');
cacheMethod = jest.fn().mockResolvedValue({});
cacheService = new CacheService(loggingService);
loggingService = jest.requireMock<ILoggingService>('@services/logging_service');
const container = new Container();
container.bind<CacheService>(CacheService).toSelf();
container.bind<ILoggingService>(IocTypes.ILoggingService).toConstantValue(loggingService);
cacheService = container.get(CacheService);
});

afterEach(() => {
@@ -113,7 +118,6 @@ describe('CacheService Tests', () => {
}
});

cacheService = new CacheService(loggingService);
const result = await cacheService.cacheWrapImdbId('testKey', cacheMethod);
expect(result).toBeDefined();
});
@@ -2,6 +2,8 @@ import "reflect-metadata"; // required
import {ICacheService} from "@interfaces/cache_service";
import {IMetadataResponse} from "@interfaces/metadata_response";
import {MetadataService} from "@services/metadata_service";
import {IocTypes} from "@setup/ioc_types";
import {Container} from "inversify";
import {setupServer} from "msw/node";
import * as responses from "./mock-responses/metadata_mock_responses";

@@ -38,7 +40,10 @@ describe('MetadataService Tests', () => {

beforeEach(() => {
mockCacheService = jest.requireMock<ICacheService>('@services/cache_service');
metadataService = new MetadataService(mockCacheService);
const container = new Container();
container.bind<MetadataService>(MetadataService).toSelf();
container.bind<ICacheService>(IocTypes.ICacheService).toConstantValue(mockCacheService);
metadataService = container.get(MetadataService);
});

it("should get kitsu id", async () => {
@@ -3,7 +3,9 @@ import {ILoggingService} from '@interfaces/logging_service';
import {ITorrentProcessingService} from '@interfaces/torrent_processing_service';
import {ProcessTorrentsJob} from '@jobs/process_torrents_job';
import {configurationService} from '@services/configuration_service';
import {IocTypes} from "@setup/ioc_types";
import client, {ConsumeMessage} from 'amqplib';
import {Container} from "inversify";

jest.mock('@services/configuration_service', () => {
return {
@@ -56,7 +58,12 @@ describe('ProcessTorrentsJob Tests', () => {
jest.clearAllMocks();
loggingService = jest.requireMock<ILoggingService>('@services/logging_service');
torrentProcessingService = jest.requireMock('@services/torrent_processing_service');
processTorrentsJob = new ProcessTorrentsJob(torrentProcessingService, loggingService);

const container = new Container();
container.bind<ProcessTorrentsJob>(ProcessTorrentsJob).toSelf();
container.bind<ILoggingService>(IocTypes.ILoggingService).toConstantValue(loggingService);
container.bind<ITorrentProcessingService>(IocTypes.ITorrentProcessingService).toConstantValue(torrentProcessingService);
processTorrentsJob = container.get(ProcessTorrentsJob);
});

afterEach(() => {
@@ -2,6 +2,8 @@ import "reflect-metadata"; // required
import {ILoggingService} from '@interfaces/logging_service';
import {IParsedTorrent} from "@interfaces/parsed_torrent";
import {TorrentDownloadService} from '@services/torrent_download_service';
import {IocTypes} from "@setup/ioc_types";
import {Container} from "inversify";
import torrentStream from 'torrent-stream';

jest.mock('@services/logging_service', () => {
@@ -27,7 +29,11 @@ describe('TorrentDownloadService', () => {
beforeEach(() => {
jest.clearAllMocks();
mockLoggingService = jest.requireMock<ILoggingService>('@services/logging_service');
torrentDownloadService = new TorrentDownloadService(mockLoggingService);

const container = new Container();
container.bind<TorrentDownloadService>(TorrentDownloadService).toSelf();
container.bind<ILoggingService>(IocTypes.ILoggingService).toConstantValue(mockLoggingService);
torrentDownloadService = container.get(TorrentDownloadService);
});

it('should get torrent files', async () => {
@@ -11,6 +11,8 @@ import {IFileAttributes} from "@repository/interfaces/file_attributes";
import {ITorrentCreationAttributes} from "@repository/interfaces/torrent_attributes";
import {Torrent} from "@repository/models/torrent";
import {TorrentEntriesService} from "@services/torrent_entries_service";
import {IocTypes} from "@setup/ioc_types";
import {Container} from "inversify";

jest.mock('@services/logging_service', () => {
return {
@@ -69,18 +71,26 @@ describe('TorrentEntriesService Tests', () => {
mockSubtitleService = jest.requireMock<ITorrentSubtitleService>('@services/torrent_subtitle_service');
mockLoggingService = jest.requireMock<ILoggingService>('@services/logging_service');
mockDatabaseRepository = jest.requireMock<IDatabaseRepository>('@repository/database_repository');
torrentEntriesService = new TorrentEntriesService(mockMetadataService, mockLoggingService, mockFileService , mockSubtitleService, mockDatabaseRepository);

const container = new Container();
container.bind<TorrentEntriesService>(TorrentEntriesService).toSelf();
container.bind<ILoggingService>(IocTypes.ILoggingService).toConstantValue(mockLoggingService);
container.bind<ITorrentFileService>(IocTypes.ITorrentFileService).toConstantValue(mockFileService);
container.bind<ITorrentSubtitleService>(IocTypes.ITorrentSubtitleService).toConstantValue(mockSubtitleService);
container.bind<IDatabaseRepository>(IocTypes.IDatabaseRepository).toConstantValue(mockDatabaseRepository);
container.bind<IMetadataService>(IocTypes.IMetadataService).toConstantValue(mockMetadataService);
torrentEntriesService = container.get(TorrentEntriesService);
});

it('should create a torrent entry', async () => {
const torrent : IParsedTorrent = {
const torrent: IParsedTorrent = {
title: 'Test title',
provider: 'Test provider',
infoHash: 'Test infoHash',
type: TorrentType.Movie,
};

const fileCollection : ITorrentFileCollection = {
const fileCollection: ITorrentFileCollection = {
videos: [{
fileIndex: 0,
title: 'Test video',
@@ -91,9 +101,9 @@ describe('TorrentEntriesService Tests', () => {
subtitles: [],
};

const fileCollectionWithSubtitles : ITorrentFileCollection = {
const fileCollectionWithSubtitles: ITorrentFileCollection = {
...fileCollection,
subtitles: [ {
subtitles: [{
fileId: 0,
title: 'Test subtitle',
fileIndex: 0,
@@ -109,7 +119,11 @@ describe('TorrentEntriesService Tests', () => {

await torrentEntriesService.createTorrentEntry(torrent);

expect(mockMetadataService.getImdbId).toHaveBeenCalledWith({ title: 'Test title', year: undefined, type: TorrentType.Movie });
expect(mockMetadataService.getImdbId).toHaveBeenCalledWith({
title: 'Test title',
year: undefined,
type: TorrentType.Movie
});
expect(mockFileService.parseTorrentFiles).toHaveBeenCalledWith(torrent);
expect(mockFileService.parseTorrentFiles).toHaveReturnedWith(Promise.resolve(fileCollection));
expect(mockSubtitleService.assignSubtitles).toHaveBeenCalledWith(fileCollection);
@@ -118,14 +132,14 @@ describe('TorrentEntriesService Tests', () => {
});

it('should assign imdbId correctly', async () => {
const torrent : IParsedTorrent = {
const torrent: IParsedTorrent = {
title: 'Test title',
provider: 'Test provider',
infoHash: 'Test infoHash',
type: TorrentType.Movie,
};

const fileCollection : ITorrentFileCollection = {
const fileCollection: ITorrentFileCollection = {
videos: [{
fileIndex: 0,
title: 'Test video',
@@ -136,9 +150,9 @@ describe('TorrentEntriesService Tests', () => {
subtitles: [],
};

const fileCollectionWithSubtitles : ITorrentFileCollection = {
const fileCollectionWithSubtitles: ITorrentFileCollection = {
...fileCollection,
subtitles: [ {
subtitles: [{
fileId: 0,
title: 'Test subtitle',
fileIndex: 0,
@@ -159,14 +173,14 @@ describe('TorrentEntriesService Tests', () => {
});

it('should assign kitsuId correctly', async () => {
const torrent : IParsedTorrent = {
const torrent: IParsedTorrent = {
title: 'Test title',
provider: 'Test provider',
infoHash: 'Test infoHash',
type: TorrentType.Anime,
};

const fileCollection : ITorrentFileCollection = {
const fileCollection: ITorrentFileCollection = {
videos: [{
fileIndex: 0,
title: 'Test video',
@@ -177,9 +191,9 @@ describe('TorrentEntriesService Tests', () => {
subtitles: [],
};

const fileCollectionWithSubtitles : ITorrentFileCollection = {
const fileCollectionWithSubtitles: ITorrentFileCollection = {
...fileCollection,
subtitles: [ {
subtitles: [{
fileId: 0,
title: 'Test subtitle',
fileIndex: 0,
@@ -240,7 +254,7 @@ describe('TorrentEntriesService Tests', () => {
});

it('should check and update torrent', async () => {
const torrent : IParsedTorrent = {
const torrent: IParsedTorrent = {
title: 'Test title',
provider: 'Test provider',
infoHash: 'Test infoHash',
@@ -248,13 +262,13 @@ describe('TorrentEntriesService Tests', () => {
seeders: 1,
};

const files : IFileAttributes[] = [{
const files: IFileAttributes[] = [{
infoHash: 'Test infoHash',
fileIndex: 0,
title: 'Test title',
path: 'Test path',
size: 123456,
},{
}, {
infoHash: 'Test infoHash 2',
fileIndex: 1,
title: 'Test title 2',
@@ -264,13 +278,13 @@ describe('TorrentEntriesService Tests', () => {

const torrentInstance = {
...torrent,
dataValues:{ ...torrent},
dataValues: {...torrent},
save: jest.fn().mockResolvedValue(torrent),
};

const filesInstance = {
...files,
dataValues:{ ...files},
dataValues: {...files},
save: jest.fn().mockResolvedValue(files),
};

@@ -307,7 +321,7 @@ describe('TorrentEntriesService Tests', () => {
}
} as Torrent;

const fileCollection : ITorrentFileCollection = {
const fileCollection: ITorrentFileCollection = {
videos: [{
id: 1,
title: 'Test video',
@@ -319,7 +333,7 @@ describe('TorrentEntriesService Tests', () => {
subtitles: [],
};

const fileCollectionWithContents : ITorrentFileCollection = {
const fileCollectionWithContents: ITorrentFileCollection = {
...fileCollection,
contents: [{
size: 123456,

@@ -5,6 +5,8 @@ import {IMetadataService} from "@interfaces/metadata_service";
import {IParsedTorrent} from "@interfaces/parsed_torrent";
import {ITorrentDownloadService} from "@interfaces/torrent_download_service";
import {TorrentFileService} from "@services/torrent_file_service";
import {IocTypes} from "@setup/ioc_types";
import {Container} from "inversify";

jest.mock('@services/logging_service', () => {
return {
@@ -38,10 +40,17 @@ describe('TorrentFileService tests', () => {

beforeEach(() => {
jest.clearAllMocks();

mockLoggingService = jest.requireMock<ILoggingService>('@services/logging_service');
mockDownloadService = jest.requireMock<ITorrentDownloadService>('@services/torrent_download_service');
mockMetadataService = jest.requireMock<IMetadataService>('@services/metadata_service');
torrentFileService = new TorrentFileService(mockMetadataService, mockDownloadService, mockLoggingService);

const container = new Container();
container.bind<TorrentFileService>(TorrentFileService).toSelf();
container.bind<ILoggingService>(IocTypes.ILoggingService).toConstantValue(mockLoggingService);
container.bind<IMetadataService>(IocTypes.IMetadataService).toConstantValue(mockMetadataService);
container.bind<ITorrentDownloadService>(IocTypes.ITorrentDownloadService).toConstantValue(mockDownloadService);
torrentFileService = container.get(TorrentFileService);
});


@@ -12,9 +12,9 @@ describe('TorrentSubtitleService tests', () => {

it('should assign subtitles to a single video', () => {
const fileCollection: ITorrentFileCollection = {
videos: [{ title: 'Test video', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash' }],
videos: [{title: 'Test video', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash'}],
contents: [],
subtitles: [{ title: 'Test subtitle', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash' }],
subtitles: [{title: 'Test subtitle', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash'}],
};

const result = torrentSubtitleService.assignSubtitles(fileCollection);
@@ -27,7 +27,7 @@ describe('TorrentSubtitleService tests', () => {
const fileCollection: ITorrentFileCollection = {
videos: [],
contents: [],
subtitles: [{ title: 'Test subtitle', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash' }],
subtitles: [{title: 'Test subtitle', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash'}],
};

const result = torrentSubtitleService.assignSubtitles(fileCollection);
@@ -37,7 +37,7 @@ describe('TorrentSubtitleService tests', () => {

it('should not assign subtitles if there are no subtitles', () => {
const fileCollection: ITorrentFileCollection = {
videos: [{ title: 'Test video', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash' }],
videos: [{title: 'Test video', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash'}],
contents: [],
subtitles: [],
};
@@ -50,13 +50,13 @@ describe('TorrentSubtitleService tests', () => {
it('should assign subtitles to multiple videos', () => {
const fileCollection: ITorrentFileCollection = {
videos: [
{ title: 'Test video S01E01', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash' },
{ title: 'Test video S01E02', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash' }
{title: 'Test video S01E01', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash'},
{title: 'Test video S01E02', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash'}
],
contents: [],
subtitles: [
{ title: 'Test subtitle S01E01', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash' },
{ title: 'Test subtitle S01E02', fileIndex: 1, path: 'Test path', infoHash: 'Test infoHash' }
{title: 'Test subtitle S01E01', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash'},
{title: 'Test subtitle S01E02', fileIndex: 1, path: 'Test path', infoHash: 'Test infoHash'}
],
};

@@ -69,9 +69,9 @@ describe('TorrentSubtitleService tests', () => {

it('should not assign subtitles if there are no matching videos', () => {
const fileCollection: ITorrentFileCollection = {
videos: [{ title: 'Test video', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash' }],
videos: [{title: 'Test video', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash'}],
contents: [],
subtitles: [{ title: 'Non-matching subtitle', fileIndex: 0, path: 'Test path', infoHash: 'Non-matching infoHash' }],
subtitles: [{title: 'Non-matching subtitle', fileIndex: 0, path: 'Test path', infoHash: 'Non-matching infoHash'}],
};

const result = torrentSubtitleService.assignSubtitles(fileCollection);
@@ -83,13 +83,13 @@ describe('TorrentSubtitleService tests', () => {
it('should assign subtitles to the most probable videos based on filename, title, season, and episode', () => {
const fileCollection: ITorrentFileCollection = {
videos: [
{ title: 'Test video S01E01', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash' },
{ title: 'Test video S01E02', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash' }
{title: 'Test video S01E01', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash'},
{title: 'Test video S01E02', size: 123456, imdbId: 'tt1234567', infoHash: 'Test infoHash'}
],
contents: [],
subtitles: [
{ title: 'Test subtitle S01E01', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash' },
{ title: 'Test subtitle S01E02', fileIndex: 1, path: 'Test path', infoHash: 'Test infoHash' }
{title: 'Test subtitle S01E01', fileIndex: 0, path: 'Test path', infoHash: 'Test infoHash'},
{title: 'Test subtitle S01E02', fileIndex: 1, path: 'Test path', infoHash: 'Test infoHash'}
],
};


@@ -2,6 +2,8 @@ import "reflect-metadata"; // required
import {ICacheService} from '@interfaces/cache_service';
import {ILoggingService} from '@interfaces/logging_service';
import {TrackerService} from '@services/tracker_service';
import {IocTypes} from "@setup/ioc_types";
import {Container} from "inversify";
import {setupServer} from 'msw/node';
import * as responses from "./mock-responses/trackers_mock_responses";

@@ -21,7 +23,6 @@ jest.mock('@services/cache_service', () => {
}
})


beforeAll(() => server.listen())
beforeEach(() => {
jest.clearAllMocks();
@@ -41,7 +42,12 @@ describe('TrackerService', () => {
beforeEach(() => {
mockCacheService = jest.requireMock<ICacheService>('@services/cache_service');
mockLoggingService = jest.requireMock<ILoggingService>('@services/logging_service');
trackerService = new TrackerService(mockCacheService, mockLoggingService);

const container = new Container();
container.bind<TrackerService>(TrackerService).toSelf();
container.bind<ILoggingService>(IocTypes.ILoggingService).toConstantValue(mockLoggingService);
container.bind<ICacheService>(IocTypes.ICacheService).toConstantValue(mockCacheService);
trackerService = container.get(TrackerService);
});

it('should get trackers', async () => {

@@ -48,6 +48,9 @@
],
"@jobs/*": [
"lib/jobs/*"
],
"@setup/*": [
"setup/*"
]
}
},

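The final hunk adds the "@jobs/*" and "@setup/*" compiler path aliases that back the imports used throughout the tests above; for example (illustrative only, relative to the baseUrl already configured in this tsconfig):

import { ProcessTorrentsJob } from "@jobs/process_torrents_job"; // resolves under lib/jobs/
import { IocTypes } from "@setup/ioc_types";                     // resolves under setup/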