1 Commit

Author: iPromKnight
SHA1: 684dbba2f0
Date: 2024-03-31 22:01:09 +01:00
Message: RTN-025 and title category parsing (#195)

* update rtn to 025
* Implement movie / show type parsing
* switch to RTN in collectors
* ensure env for pythonnet is loaded, and that requirements copy for qbit
* version bump
58 changed files with 222 additions and 824 deletions
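
The collector changes below replace the PromKnight.ParseTorrentTitle package with the RTN-backed IRankTorrentName. A minimal sketch of the new call shape, built only from members visible in this commit (Parse, Success, Response, Season, Episode, IsMovie); the wrapper class, method name and sample title are illustrative and not code from the repo:

using System.Linq;
using SharedContracts.Python.RTN;

public static class RtnUsageSketch
{
    // Mirrors how the collectors map a parsed title onto TorrentFile fields.
    public static (int Season, int Episode, bool IsMovie)? Classify(IRankTorrentName rtn, string title)
    {
        var parsed = rtn.Parse(title, trashGarbage: false);   // the collectors pass false
        if (!parsed.Success)
        {
            return null;                                       // consumers log a warning and skip the file
        }

        return (parsed.Response?.Season?.FirstOrDefault() ?? 0,   // -> TorrentFile.ImdbSeason
                parsed.Response?.Episode?.FirstOrDefault() ?? 0,  // -> TorrentFile.ImdbEpisode
                parsed.Response?.IsMovie ?? false);               // set from RTN's "type" attribute
    }
}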

.gitignore (vendored) · 4 changes
View File

@@ -612,3 +612,7 @@ fabric.properties
# Mac directory indexes
.DS_Store
deployment/docker/stack.env
src/producer/src/python/
src/debrid-collector/python/
src/qbit-collector/python/

View File

@@ -94,7 +94,7 @@ services:
condition: service_healthy
env_file: stack.env
hostname: knightcrawler-addon
image: gabisonfire/knightcrawler-addon:2.0.17
image: gabisonfire/knightcrawler-addon:2.0.18
labels:
logging: promtail
networks:
@@ -117,7 +117,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-consumer:2.0.17
image: gabisonfire/knightcrawler-consumer:2.0.18
labels:
logging: promtail
networks:
@@ -138,7 +138,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-debrid-collector:2.0.17
image: gabisonfire/knightcrawler-debrid-collector:2.0.18
labels:
logging: promtail
networks:
@@ -152,7 +152,7 @@ services:
migrator:
condition: service_completed_successfully
env_file: stack.env
image: gabisonfire/knightcrawler-metadata:2.0.17
image: gabisonfire/knightcrawler-metadata:2.0.18
networks:
- knightcrawler-network
restart: "no"
@@ -163,7 +163,7 @@ services:
postgres:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-migrator:2.0.17
image: gabisonfire/knightcrawler-migrator:2.0.18
networks:
- knightcrawler-network
restart: "no"
@@ -182,7 +182,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-producer:2.0.17
image: gabisonfire/knightcrawler-producer:2.0.18
labels:
logging: promtail
networks:
@@ -207,7 +207,7 @@ services:
deploy:
replicas: ${QBIT_REPLICAS:-0}
env_file: stack.env
image: gabisonfire/knightcrawler-qbit-collector:2.0.17
image: gabisonfire/knightcrawler-qbit-collector:2.0.18
labels:
logging: promtail
networks:

View File

@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends
services:
metadata:
image: gabisonfire/knightcrawler-metadata:2.0.17
image: gabisonfire/knightcrawler-metadata:2.0.18
env_file: ../../.env
networks:
- knightcrawler-network
@@ -30,7 +30,7 @@ services:
condition: service_completed_successfully
migrator:
image: gabisonfire/knightcrawler-migrator:2.0.17
image: gabisonfire/knightcrawler-migrator:2.0.18
env_file: ../../.env
networks:
- knightcrawler-network
@@ -40,7 +40,7 @@ services:
condition: service_healthy
addon:
image: gabisonfire/knightcrawler-addon:2.0.17
image: gabisonfire/knightcrawler-addon:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
hostname: knightcrawler-addon
@@ -48,22 +48,22 @@ services:
- "7000:7000"
consumer:
image: gabisonfire/knightcrawler-consumer:2.0.17
image: gabisonfire/knightcrawler-consumer:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
debridcollector:
image: gabisonfire/knightcrawler-debrid-collector:2.0.17
image: gabisonfire/knightcrawler-debrid-collector:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
producer:
image: gabisonfire/knightcrawler-producer:2.0.17
image: gabisonfire/knightcrawler-producer:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
qbitcollector:
image: gabisonfire/knightcrawler-qbit-collector:2.0.17
image: gabisonfire/knightcrawler-qbit-collector:2.0.18
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
depends_on:

View File

@@ -1,6 +0,0 @@
apiVersion: v2
appVersion: 2.0.17
description: A helm chart for Knightcrawler
name: knightcrawler
type: application
version: 0.1.0

View File

@@ -1,6 +0,0 @@
Congratulations,
Knightcrawler is now deployed. This may take a while to be up and responding.

View File

@@ -1,27 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: '{{ .Release.Name }}-config'
labels:
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
data:
COLLECTOR_DEBRID_ENABLED: '{{ .Values.knightcrawler.debridcollector.enabled }}'
COLLECTOR_QBIT_ENABLED: '{{ .Values.knightcrawler.qbitcollector.enabled }}'
DEBUG_MODE: '{{ .Values.knightcrawler.debug }}'
METADATA_INSERT_BATCH_SIZE: '{{ .Values.environment.metadata.insertBatchSize }}'
POSTGRES_DB: '{{ .Values.environment.postgres.dbName }}'
POSTGRES_HOST: '{{ if .Values.environment.postgres.external }}{{ .Values.environment.postgres.host }}{{ else }}{{ .Release.Name }}-postgres{{ end }}'
POSTGRES_PORT: '{{ .Values.environment.postgres.port }}'
QBIT_HOST: '{{ .Values.environment.qbitcollector.qbitHost }}'
QBIT_TRACKERS_URL: '{{ .Values.environment.qbitcollector.trackersUrl }}'
RABBITMQ_CONSUMER_QUEUE_NAME: '{{ .Values.environment.producer.queueName }}'
RABBITMQ_DURABLE: '{{ .Values.environment.producer.durable }}'
RABBITMQ_HOST: '{{ if .Values.environment.lavinmq.external }}{{ .Values.environment.lavinmq.host }}{{ else }}{{ .Release.Name }}-lavinmq{{ end }}'
RABBITMQ_MAX_PUBLISH_BATCH_SIZE: '{{ .Values.environment.producer.maxPublishBatchSize }}'
RABBITMQ_MAX_QUEUE_SIZE: '{{ .Values.environment.producer.maxQueueSize }}'
RABBITMQ_PUBLISH_INTERVAL_IN_SECONDS: '{{ .Values.environment.producer.publishIntervalSeconds }}'
REDIS_EXTRA: '{{ .Values.environment.redis.extra }}'
REDIS_HOST: '{{ if .Values.environment.redis.external }}{{ .Values.environment.redis.host }}{{ else }}{{ .Release.Name }}-redis{{ end }}'
REDIS_PORT: '{{ .Values.environment.redis.port }}'
TZ: '{{ .Values.shared.timezone }}'

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Secret
metadata:
name: '{{ .Release.Name }}-secrets'
labels:
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
type: Opaque
data:
GITHUB_PAT: '{{ .Values.environment.producer.githubPat | b64enc }}'
COLLECTOR_REAL_DEBRID_API_KEY: '{{ .Values.environment.debridcollector.realDebridApiKey | b64enc }}'
POSTGRES_USER: '{{ .Values.environment.postgres.user | b64enc }}'
POSTGRES_PASSWORD: '{{ .Values.environment.postgres.password | b64enc }}'
RABBITMQ_PASSWORD: '{{ .Values.environment.lavinmq.password | b64enc }}'
RABBITMQ_USER: '{{ .Values.environment.lavinmq.user | b64enc }}'

View File

@@ -1,25 +0,0 @@
{{ if .Values.infrastructure.lavinmq.enabled }}
apiVersion: v1
kind: Service
metadata:
name: '{{ .Release.Name }}-lavinmq'
labels:
component: lavinmq
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
spec:
selector:
component: lavinmq
release: '{{ .Release.Name }}'
type: ClusterIP
ports:
- protocol: TCP
port: 5672
targetPort: 5672
- protocol: TCP
port: 15672
targetPort: 15672
- protocol: TCP
port: 15692
targetPort: 15692
{{- end -}}

View File

@@ -1,60 +0,0 @@
{{ if .Values.infrastructure.lavinmq.enabled }}
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: '{{ .Release.Name }}-lavinmq'
labels:
component: lavinmq
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "0"
spec:
serviceName: '{{ .Release.Name }}-lavinmq'
replicas: 1
selector:
matchLabels:
component: lavinmq
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: lavinmq
release: '{{ .Release.Name }}'
spec:
containers:
- name: lavinmq
image: '{{ .Values.infrastructure.lavinmq.image }}:{{ .Values.infrastructure.lavinmq.tag }}'
ports:
- name: lavinmq
containerPort: 5672
- name: lavinmq-15672
containerPort: 15672
- name: lavinmq-15692
containerPort: 15692
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'
volumeMounts:
- mountPath: /var/lib/lavinmq
name: lavinmq
livenessProbe:
exec:
command:
- lavinmqctl status
periodSeconds: 10
initialDelaySeconds: 10
successThreshold: 1
failureThreshold: 3
volumeClaimTemplates:
- metadata:
name: lavinmq
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: '{{ .Values.persistence.lavinmq.capacity }}'
{{- end -}}

View File

@@ -1,19 +0,0 @@
{{ if .Values.infrastructure.postgres.enabled }}
apiVersion: v1
kind: Service
metadata:
name: '{{ .Release.Name }}-postgres'
labels:
component: postgres
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
spec:
selector:
component: postgres
release: '{{ .Release.Name }}'
type: ClusterIP
ports:
- protocol: TCP
port: 5432
targetPort: 5432
{{- end -}}

View File

@@ -1,58 +0,0 @@
{{ if .Values.infrastructure.postgres.enabled }}
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: '{{ .Release.Name }}-postgres'
labels:
component: postgres
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "0"
spec:
serviceName: '{{ .Release.Name }}-postgres'
replicas: 1
selector:
matchLabels:
component: postgres
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: postgres
release: '{{ .Release.Name }}'
spec:
containers:
- name: postgres
image: '{{ .Values.infrastructure.postgres.image }}:{{ .Values.infrastructure.postgres.tag }}'
ports:
- name: postgres
containerPort: 5432
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'
volumeMounts:
- mountPath: /var/lib/postgresql/data
name: postgres
livenessProbe:
exec:
command:
- sh
- -c
- pg_isready -h localhost -U $POSTGRES_USER
periodSeconds: 10
initialDelaySeconds: 10
successThreshold: 1
failureThreshold: 3
volumeClaimTemplates:
- metadata:
name: postgres
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: '{{ .Values.persistence.postgres.capacity }}'
{{- end -}}

View File

@@ -1,57 +0,0 @@
{{ if .Values.knightcrawler.qbitcollector.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
name: '{{ .Release.Name }}-qbittorrent'
labels:
component: qbittorrent
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "0"
spec:
replicas: 1
selector:
matchLabels:
component: qbittorrent
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: qbittorrent
release: '{{ .Release.Name }}'
spec:
containers:
- name: qbittorrent
image: '{{ .Values.infrastructure.qbittorrent.image }}:{{ .Values.infrastructure.qbittorrent.tag }}'
ports:
- name: qbittorrent
containerPort: 6881
- name: qbittorrent-6881
containerPort: 6881
- name: qbittorrent-8080
containerPort: 8080
env:
- name: PUID
value: '{{ .Values.environment.qbittorrent.puid }}'
- name: PGID
value: '{{ .Values.environment.qbittorrent.pgid }}'
- name: TORRENTING_PORT
value: '{{ .Values.environment.qbittorrent.torrentingPort }}'
- name: WEBUI_PORT
value: '{{ .Values.environment.qbittorrent.webuiPort }}'
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'
livenessProbe:
exec:
command:
- curl --fail http://localhost:8080
periodSeconds: 10
initialDelaySeconds: 10
successThreshold: 1
failureThreshold: 3
{{- end -}}

View File

@@ -1,25 +0,0 @@
{{ if .Values.knightcrawler.qbitcollector.enabled }}
apiVersion: v1
kind: Service
metadata:
name: '{{ .Release.Name }}-qbittorrent'
labels:
component: qbittorrent
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
spec:
selector:
component: qbittorrent
release: '{{ .Release.Name }}'
type: ClusterIP
ports:
- protocol: TCP
port: 6881
targetPort: 6881
- protocol: TCP
port: 6881
targetPort: 6881
- protocol: TCP
port: 8080
targetPort: 8080
{{- end -}}

View File

@@ -1,19 +0,0 @@
{{ if .Values.infrastructure.redis.enabled }}
apiVersion: v1
kind: Service
metadata:
name: '{{ .Release.Name }}-redis'
labels:
component: redis
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
spec:
selector:
component: redis
release: '{{ .Release.Name }}'
type: ClusterIP
ports:
- protocol: TCP
port: 6379
targetPort: 6379
{{- end -}}

View File

@@ -1,56 +0,0 @@
{{ if .Values.infrastructure.redis.enabled }}
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: '{{ .Release.Name }}-redis'
labels:
component: redis
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "0"
spec:
serviceName: '{{ .Release.Name }}-redis'
replicas: 1
selector:
matchLabels:
component: redis
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: redis
release: '{{ .Release.Name }}'
spec:
containers:
- name: redis
image: '{{ .Values.infrastructure.redis.image }}:{{ .Values.infrastructure.redis.tag }}'
ports:
- name: redis
containerPort: 6379
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'
volumeMounts:
- mountPath: /data
name: redis
livenessProbe:
exec:
command:
- redis-cli ping
periodSeconds: 10
initialDelaySeconds: 10
successThreshold: 1
failureThreshold: 3
volumeClaimTemplates:
- metadata:
name: redis
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: '{{ .Values.persistence.redis.capacity }}'
{{- end -}}

View File

@@ -1,28 +0,0 @@
apiVersion: batch/v1
kind: Job
metadata:
name: '{{ .Release.Name }}-metadata'
labels:
component: metadata
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "2"
"helm.sh/hook-delete-policy": hook-succeeded
spec:
template:
metadata:
labels:
component: metadata
release: '{{ .Release.Name }}'
spec:
restartPolicy: OnFailure
containers:
- name: metadata
image: '{{ .Values.knightcrawler.metadata.image }}{{ if ne .Values.knightcrawler.globalImageTagOverride "" }}:{{ .Values.knightcrawler.globalImageTagOverride }}{{else}}:{{ .Values.knightcrawler.metadata.tag}}{{ end }}'
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'

View File

@@ -1,28 +0,0 @@
apiVersion: batch/v1
kind: Job
metadata:
name: '{{ .Release.Name }}-migrator'
labels:
component: migrator
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "1"
"helm.sh/hook-delete-policy": hook-succeeded
spec:
template:
metadata:
labels:
component: migrator
release: '{{ .Release.Name }}'
spec:
restartPolicy: OnFailure
containers:
- name: migrator
image: '{{ .Values.knightcrawler.migrator.image }}{{ if ne .Values.knightcrawler.globalImageTagOverride "" }}:{{ .Values.knightcrawler.globalImageTagOverride }}{{else}}:{{ .Values.knightcrawler.migrator.tag}}{{ end }}'
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'

View File

@@ -1,35 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: '{{ .Release.Name }}-addon'
labels:
component: addon
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "4"
spec:
replicas: {{ .Values.knightcrawler.addon.replicas }}
selector:
matchLabels:
component: addon
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: addon
release: '{{ .Release.Name }}'
spec:
containers:
- name: addon
image: '{{ .Values.knightcrawler.addon.image }}{{ if ne .Values.knightcrawler.globalImageTagOverride "" }}:{{ .Values.knightcrawler.globalImageTagOverride }}{{else}}:{{ .Values.knightcrawler.addon.tag}}{{ end }}'
ports:
- name: addon
containerPort: 7000
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'

View File

@@ -1,32 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: '{{ .Release.Name }}-consumer'
labels:
component: consumer
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "4"
spec:
replicas: {{ .Values.knightcrawler.consumer.replicas }}
selector:
matchLabels:
component: consumer
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: consumer
release: '{{ .Release.Name }}'
spec:
containers:
- name: consumer
image: '{{ .Values.knightcrawler.consumer.image }}{{ if ne .Values.knightcrawler.globalImageTagOverride "" }}:{{ .Values.knightcrawler.globalImageTagOverride }}{{else}}:{{ .Values.knightcrawler.consumer.tag}}{{ end }}'
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'

View File

@@ -1,31 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: '{{ .Release.Name }}-debridcollector'
labels:
component: debridcollector
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "4"
spec:
replicas: {{ .Values.knightcrawler.debridcollector.replicas }}
selector:
matchLabels:
component: debridcollector
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: debridcollector
release: '{{ .Release.Name }}'
spec:
containers:
- name: debridcollector
image: '{{ .Values.knightcrawler.debridcollector.image }}{{ if ne .Values.knightcrawler.globalImageTagOverride "" }}:{{ .Values.knightcrawler.globalImageTagOverride }}{{else}}:{{ .Values.knightcrawler.debridcollector.tag}}{{ end }}'
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'

View File

@@ -1,31 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: '{{ .Release.Name }}-producer'
labels:
component: producer
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "4"
spec:
replicas: {{ .Values.knightcrawler.producer.replicas }}
selector:
matchLabels:
component: producer
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: producer
release: '{{ .Release.Name }}'
spec:
containers:
- name: producer
image: '{{ .Values.knightcrawler.producer.image }}{{ if ne .Values.knightcrawler.globalImageTagOverride "" }}:{{ .Values.knightcrawler.globalImageTagOverride }}{{else}}:{{ .Values.knightcrawler.producer.tag}}{{ end }}'
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'

View File

@@ -1,33 +0,0 @@
{{ if .Values.knightcrawler.qbitcollector.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
name: '{{ .Release.Name }}-qbitcollector'
labels:
component: qbitcollector
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
annotations:
"helm.sh/hook": post-install,post-upgrade
"helm.sh/hook-weight": "4"
spec:
replicas: {{ .Values.knightcrawler.qbitcollector.replicas }}
selector:
matchLabels:
component: qbitcollector
release: '{{ .Release.Name }}'
template:
metadata:
labels:
component: qbitcollector
release: '{{ .Release.Name }}'
spec:
containers:
- name: qbitcollector
image: '{{ .Values.knightcrawler.qbitcollector.image }}{{ if ne .Values.knightcrawler.globalImageTagOverride "" }}:{{ .Values.knightcrawler.globalImageTagOverride }}{{else}}:{{ .Values.knightcrawler.qbitcollector.tag}}{{ end }}'
envFrom:
- configMapRef:
name: '{{ .Release.Name }}-config'
- secretRef:
name: '{{ .Release.Name }}-secrets'
{{- end -}}

View File

@@ -1,17 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: '{{ .Release.Name }}-addon'
labels:
component: addon
project: '{{ .Chart.Name }}'
release: '{{ .Release.Name }}'
spec:
selector:
component: addon
release: '{{ .Release.Name }}'
type: ClusterIP
ports:
- protocol: TCP
port: 7000
targetPort: 7000

View File

@@ -1,100 +0,0 @@
knightcrawler:
debug: false
globalImageTagOverride: ""
addon:
image: "gabisonfire/knightcrawler-addon"
tag: "2.0.17"
replicas: 1
consumer:
image: "gabisonfire/knightcrawler-consumer"
tag: "2.0.17"
replicas: 1
metadata:
image: "gabisonfire/knightcrawler-metadata"
tag: "2.0.17"
replicas: 1
migrator:
image: "gabisonfire/knightcrawler-migrator"
tag: "2.0.17"
replicas: 1
debridcollector:
image: "gabisonfire/knightcrawler-debrid-collector"
tag: "2.0.17"
enabled: true
replicas: 1
qbitcollector:
image: "gabisonfire/knightcrawler-qbit-collector"
tag: "2.0.17"
enabled: false
replicas: 1
producer:
image: "gabisonfire/knightcrawler-producer"
tag: "2.0.17"
replicas: 1
infrastructure:
lavinmq:
image: "cloudamqp/lavinmq"
tag: "latest"
enabled: true
postgres:
image: "postgres"
tag: "latest"
enabled: true
redis:
image: "redis/redis-stack-server"
tag: "latest"
enabled: true
qbittorrent:
image: "lscr.io/linuxserver/qbittorrent"
tag: "latest"
environment:
redis:
external: false
host: ""
port: "6379"
extra: "abortConnect=false,allowAdmin=true"
postgres:
external: false
host: ""
port: "5432"
dbName: "knightcrawler"
user: "postgres"
password: "postgres"
lavinmq:
external: false
host: ""
user: "guest"
password: "guest"
qbitcollector:
qbitHost: "http://qbittorrent:8080"
trackersUrl: "https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_all_http.txt"
debridcollector:
realDebridApiKey: ""
producer:
githubPat: ""
queueName: "ingested"
durable: true
maxPublishBatchSize: 500
maxQueueSize: 0
publishIntervalSeconds: 10
metadata:
insertBatchSize: 50000
qbittorrent:
pgid: "1000"
puid: "1000"
torrentingPort: "6881"
webuiPort: "8080"
persistence:
storageClassName: ""
redis:
capacity: 1Gi
postgres:
capacity: 1Gi
lavinmq:
capacity: 1Gi
shared:
timezone: "London/Europe"

View File

@@ -17,7 +17,6 @@
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
<PackageReference Include="Polly" Version="8.3.1" />
<PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
@@ -29,10 +28,30 @@
<None Include="Configuration\logging.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="requirements.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<Content Remove="eng\**" />
<None Remove="eng\**" />
</ItemGroup>
<ItemGroup Condition="'$(Configuration)' == 'Debug'">
<Content Remove="python\**" />
<None Include="python\**">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\shared\SharedContracts.csproj" />
</ItemGroup>
<ItemGroup>
<Compile Remove="eng\**" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Remove="eng\**" />
</ItemGroup>
</Project>

View File

@@ -6,6 +6,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharedContracts", "..\share
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "shared", "shared", "{2C0A0F53-28E6-404F-9EFE-DADFBEF8338B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{72A042C3-B4F3-45C5-AC20-041FE8F41EFC}"
ProjectSection(SolutionItems) = preProject
eng\install-python-reqs.ps1 = eng\install-python-reqs.ps1
eng\install-python-reqs.sh = eng\install-python-reqs.sh
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU

View File

@@ -9,12 +9,23 @@ RUN dotnet restore -a $TARGETARCH
RUN dotnet publish -c Release --no-restore -o /src/out -a $TARGETARCH
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19
WORKDIR /app
ENV PYTHONUNBUFFERED=1
RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .
RUN rm -rf /app/python && mkdir -p /app/python
RUN pip3 install -r /app/requirements.txt -t /app/python
RUN addgroup -S debrid && adduser -S -G debrid debrid
USER debrid
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
CMD pgrep -f dotnet || exit 1
ENV PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0
ENTRYPOINT ["dotnet", "DebridCollector.dll"]
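
The Dockerfile wires the .NET app to an embedded CPython: PYTHONNET_PYDLL points at Alpine's libpython and the requirements are pip-installed into /app/python. The repo's RegisterPythonEngine extension is not part of this diff, so the following is only a hedged sketch of what such a bootstrap typically looks like with pythonnet, assuming it honours PYTHONNET_PYDLL and the python/ folder next to the binaries:

using System;
using System.IO;
using Python.Runtime;

public static class PythonEngineBootstrapSketch
{
    public static void Initialize()
    {
        // The image exports PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0.
        Runtime.PythonDLL = Environment.GetEnvironmentVariable("PYTHONNET_PYDLL");

        PythonEngine.Initialize();

        using (Py.GIL())
        {
            // Make the pip "-t" target importable so the RTN module from
            // rank-torrent-name==0.2.5 can be loaded by RankTorrentName.
            dynamic sys = Py.Import("sys");
            sys.path.append(Path.Combine(AppContext.BaseDirectory, "python"));
        }
    }
}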

View File

@@ -1,5 +1,3 @@
using DebridCollector.Features.Configuration;
namespace DebridCollector.Extensions;
public static class ServiceCollectionExtensions
@@ -17,7 +15,8 @@ public static class ServiceCollectionExtensions
var serviceConfiguration = services.LoadConfigurationFromEnv<DebridCollectorConfiguration>();
services.AddRealDebridClient(serviceConfiguration);
services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
services.RegisterPythonEngine();
services.AddSingleton<IRankTorrentName, RankTorrentName>();
services.AddHostedService<DebridRequestProcessor>();
return services;

View File

@@ -1,6 +1,4 @@
using DebridCollector.Features.Configuration;
namespace DebridCollector.Features.Debrid;
namespace DebridCollector.Features.Debrid;
public static class ServiceCollectionExtensions
{

View File

@@ -3,10 +3,11 @@ namespace DebridCollector.Features.Worker;
public static class DebridMetaToTorrentMeta
{
public static IReadOnlyList<TorrentFile> MapMetadataToFilesCollection(
IParseTorrentTitle torrentTitle,
IRankTorrentName rankTorrentName,
Torrent torrent,
string ImdbId,
FileDataDictionary Metadata)
FileDataDictionary Metadata,
ILogger<WriteMetadataConsumer> logger)
{
try
{
@@ -26,23 +27,30 @@ public static class DebridMetaToTorrentMeta
Size = metadataEntry.Value.Filesize.GetValueOrDefault(),
};
var parsedTitle = torrentTitle.Parse(file.Title);
var parsedTitle = rankTorrentName.Parse(file.Title, false);
file.ImdbSeason = parsedTitle.Seasons.FirstOrDefault();
file.ImdbEpisode = parsedTitle.Episodes.FirstOrDefault();
if (!parsedTitle.Success)
{
logger.LogWarning("Failed to parse title {Title} for metadata mapping", file.Title);
continue;
}
file.ImdbSeason = parsedTitle.Response?.Season?.FirstOrDefault() ?? 0;
file.ImdbEpisode = parsedTitle.Response?.Episode?.FirstOrDefault() ?? 0;
files.Add(file);
}
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to files collection: {Exception}", ex.Message);
return [];
}
}
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, FileDataDictionary Metadata)
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, FileDataDictionary Metadata, ILogger<WriteMetadataConsumer> logger)
{
try
{
@@ -74,8 +82,9 @@ public static class DebridMetaToTorrentMeta
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to subtitles collection: {Exception}", ex.Message);
return [];
}
}

View File

@@ -53,6 +53,12 @@ public class InfohashMetadataSagaStateMachine : MassTransitStateMachine<Infohash
.Then(
context =>
{
if (!context.Message.WithFiles)
{
logger.LogInformation("No files written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
return;
}
logger.LogInformation("Metadata Written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
})
.TransitionTo(Completed)

View File

@@ -16,7 +16,7 @@ public record WriteMetadata(Torrent Torrent, TorrentMetadataResponse Metadata, s
}
[EntityName("metadata-written-debrid-colloctor")]
public record MetadataWritten(TorrentMetadataResponse Metadata) : CorrelatedBy<Guid>
public record MetadataWritten(TorrentMetadataResponse Metadata, bool WithFiles) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;
}

View File

@@ -1,25 +1,28 @@
namespace DebridCollector.Features.Worker;
public class WriteMetadataConsumer(IParseTorrentTitle parseTorrentTitle, IDataStorage dataStorage) : IConsumer<WriteMetadata>
public class WriteMetadataConsumer(IRankTorrentName rankTorrentName, IDataStorage dataStorage, ILogger<WriteMetadataConsumer> logger) : IConsumer<WriteMetadata>
{
public async Task Consume(ConsumeContext<WriteMetadata> context)
{
var request = context.Message;
var torrentFiles = DebridMetaToTorrentMeta.MapMetadataToFilesCollection(parseTorrentTitle, request.Torrent, request.ImdbId, request.Metadata.Metadata);
var torrentFiles = DebridMetaToTorrentMeta.MapMetadataToFilesCollection(rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);
if (torrentFiles.Any())
if (!torrentFiles.Any())
{
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await DebridMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new MetadataWritten(request.Metadata, false));
return;
}
await context.Publish(new MetadataWritten(request.Metadata));
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await DebridMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata, logger);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new MetadataWritten(request.Metadata, true));
}
}

View File

@@ -4,17 +4,18 @@ global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Threading.Channels;
global using DebridCollector.Extensions;
global using DebridCollector.Features.Configuration;
global using DebridCollector.Features.Debrid;
global using DebridCollector.Features.Worker;
global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using Polly;
global using Polly.Extensions.Http;
global using PromKnight.ParseTorrentTitle;
global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;
global using SharedContracts.Models;
global using SharedContracts.Python;
global using SharedContracts.Python.RTN;
global using SharedContracts.Requests;

View File

@@ -0,0 +1,2 @@
mkdir -p ../python
python -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1,5 @@
#!/bin/bash
rm -rf ../python
mkdir -p ../python
python3 -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1 @@
rank-torrent-name==0.2.5

View File

@@ -1 +1 @@
rank-torrent-name==0.1.8
rank-torrent-name==0.2.5

View File

@@ -9,12 +9,23 @@ RUN dotnet restore -a $TARGETARCH
RUN dotnet publish -c Release --no-restore -o /src/out -a $TARGETARCH
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine
FROM mcr.microsoft.com/dotnet/aspnet:8.0-alpine3.19
WORKDIR /app
ENV PYTHONUNBUFFERED=1
RUN apk add --update --no-cache python3=~3.11.8-r0 py3-pip && ln -sf python3 /usr/bin/python
COPY --from=build /src/out .
RUN rm -rf /app/python && mkdir -p /app/python
RUN pip3 install -r /app/requirements.txt -t /app/python
RUN addgroup -S qbit && adduser -S -G qbit qbit
USER qbit
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
CMD pgrep -f dotnet || exit 1
ENV PYTHONNET_PYDLL=/usr/lib/libpython3.11.so.1.0
ENTRYPOINT ["dotnet", "QBitCollector.dll"]

View File

@@ -13,7 +13,8 @@ public static class ServiceCollectionExtensions
internal static IServiceCollection AddServiceConfiguration(this IServiceCollection services)
{
services.AddQBitTorrentClient();
services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
services.RegisterPythonEngine();
services.AddSingleton<IRankTorrentName, RankTorrentName>();
services.AddSingleton<QbitRequestProcessor>();
services.AddHttpClient();
services.AddSingleton<ITrackersService, TrackersService>();

View File

@@ -3,10 +3,11 @@ namespace QBitCollector.Features.Worker;
public static class QbitMetaToTorrentMeta
{
public static IReadOnlyList<TorrentFile> MapMetadataToFilesCollection(
IParseTorrentTitle torrentTitle,
IRankTorrentName rankTorrentName,
Torrent torrent,
string ImdbId,
IReadOnlyList<TorrentContent> Metadata)
IReadOnlyList<TorrentContent> Metadata,
ILogger<WriteQbitMetadataConsumer> logger)
{
try
{
@@ -24,23 +25,31 @@ public static class QbitMetaToTorrentMeta
Size = metadataEntry.Size,
};
var parsedTitle = torrentTitle.Parse(file.Title);
var parsedTitle = rankTorrentName.Parse(file.Title, false);
if (!parsedTitle.Success)
{
logger.LogWarning("Failed to parse title {Title} for metadata mapping", file.Title);
continue;
}
file.ImdbSeason = parsedTitle.Seasons.FirstOrDefault();
file.ImdbEpisode = parsedTitle.Episodes.FirstOrDefault();
file.ImdbSeason = parsedTitle.Response?.Season?.FirstOrDefault() ?? 0;
file.ImdbEpisode = parsedTitle.Response?.Episode?.FirstOrDefault() ?? 0;
files.Add(file);
}
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to files collection: {Exception}", ex.Message);
return [];
}
}
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, IReadOnlyList<TorrentContent> Metadata)
public static async Task<IReadOnlyList<SubtitleFile>> MapMetadataToSubtitlesCollection(IDataStorage storage, string InfoHash, IReadOnlyList<TorrentContent> Metadata,
ILogger<WriteQbitMetadataConsumer> logger)
{
try
{
@@ -70,8 +79,9 @@ public static class QbitMetaToTorrentMeta
return files;
}
catch (Exception)
catch (Exception ex)
{
logger.LogWarning("Failed to map metadata to subtitles collection: {Exception}", ex.Message);
return [];
}
}

View File

@@ -53,6 +53,12 @@ public class QbitMetadataSagaStateMachine : MassTransitStateMachine<QbitMetadata
.Then(
context =>
{
if (!context.Message.WithFiles)
{
logger.LogInformation("No files written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
return;
}
logger.LogInformation("Metadata Written for torrent {InfoHash} in Saga {SagaId}", context.Saga.Torrent.InfoHash, context.Saga.CorrelationId);
})
.TransitionTo(Completed)

View File

@@ -16,7 +16,7 @@ public record WriteQbitMetadata(Torrent Torrent, QBitMetadataResponse Metadata,
}
[EntityName("metadata-written-qbit-collector")]
public record QbitMetadataWritten(QBitMetadataResponse Metadata) : CorrelatedBy<Guid>
public record QbitMetadataWritten(QBitMetadataResponse Metadata, bool WithFiles) : CorrelatedBy<Guid>
{
public Guid CorrelationId { get; init; } = Metadata.CorrelationId;

View File

@@ -1,25 +1,30 @@
namespace QBitCollector.Features.Worker;
public class WriteQbitMetadataConsumer(IParseTorrentTitle parseTorrentTitle, IDataStorage dataStorage) : IConsumer<WriteQbitMetadata>
public class WriteQbitMetadataConsumer(IRankTorrentName rankTorrentName, IDataStorage dataStorage, ILogger<WriteQbitMetadataConsumer> logger) : IConsumer<WriteQbitMetadata>
{
public async Task Consume(ConsumeContext<WriteQbitMetadata> context)
{
var request = context.Message;
var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(parseTorrentTitle, request.Torrent, request.ImdbId, request.Metadata.Metadata);
if (torrentFiles.Any())
var torrentFiles = QbitMetaToTorrentMeta.MapMetadataToFilesCollection(
rankTorrentName, request.Torrent, request.ImdbId, request.Metadata.Metadata, logger);
if (!torrentFiles.Any())
{
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await QbitMetaToTorrentMeta.MapMetadataToSubtitlesCollection(dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new QbitMetadataWritten(request.Metadata, false));
return;
}
await context.Publish(new QbitMetadataWritten(request.Metadata));
await dataStorage.InsertFiles(torrentFiles);
var subtitles = await QbitMetaToTorrentMeta.MapMetadataToSubtitlesCollection(
dataStorage, request.Torrent.InfoHash, request.Metadata.Metadata, logger);
if (subtitles.Any())
{
await dataStorage.InsertSubtitles(subtitles);
}
await context.Publish(new QbitMetadataWritten(request.Metadata, true));
}
}

View File

@@ -1,17 +1,11 @@
// Global using directives
global using System.Text.Json;
global using System.Text.Json.Serialization;
global using System.Threading.Channels;
global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.Caching.Distributed;
global using Microsoft.Extensions.Caching.Memory;
global using Microsoft.Extensions.DependencyInjection;
global using Polly;
global using Polly.Extensions.Http;
global using PromKnight.ParseTorrentTitle;
global using QBitCollector.Extensions;
global using QBitCollector.Features.Qbit;
global using QBitCollector.Features.Trackers;
@@ -21,4 +15,6 @@ global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;
global using SharedContracts.Models;
global using SharedContracts.Python;
global using SharedContracts.Python.RTN;
global using SharedContracts.Requests;

View File

@@ -18,7 +18,6 @@
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
<PackageReference Include="Polly" Version="8.3.1" />
<PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
<PackageReference Include="QBittorrent.Client" Version="1.9.23349.1" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
@@ -31,10 +30,30 @@
<None Include="Configuration\logging.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<Content Remove="eng\**" />
<None Remove="eng\**" />
<None Update="requirements.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\shared\SharedContracts.csproj" />
</ItemGroup>
<ItemGroup Condition="'$(Configuration)' == 'Debug'">
<Content Remove="python\**" />
<None Include="python\**">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<Compile Remove="eng\**" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Remove="eng\**" />
</ItemGroup>
</Project>

View File

@@ -6,6 +6,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "shared", "shared", "{2C0A0F
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "QBitCollector", "QBitCollector.csproj", "{1EF124BE-6EBE-4D9E-846C-FFF814999F3B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{2F2EA33A-1303-405D-939B-E9394D262BC9}"
ProjectSection(SolutionItems) = preProject
eng\install-python-reqs.ps1 = eng\install-python-reqs.ps1
eng\install-python-reqs.sh = eng\install-python-reqs.sh
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU

View File

@@ -0,0 +1,3 @@
Remove-Item -Recurse -Force ../python
mkdir -p ../python
python -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1,5 @@
#!/bin/bash
rm -rf ../python
mkdir -p ../python
python3 -m pip install -r ../requirements.txt -t ../python/

View File

@@ -0,0 +1 @@
rank-torrent-name==0.2.5

View File

@@ -167,12 +167,7 @@ public class DapperDataStorage(PostgresConfiguration configuration, RabbitMqConf
INSERT INTO subtitles
("infoHash", "fileIndex", "fileId", "title")
VALUES
(@InfoHash, @FileIndex, @FileId, @Title)
ON CONFLICT
("infoHash", "fileIndex")
DO UPDATE SET
"fileId" = COALESCE(subtitles."fileId", EXCLUDED."fileId"),
"title" = COALESCE(subtitles."title", EXCLUDED."title");
(@InfoHash, @FileIndex, @FileId, @Title);
""";
await connection.ExecuteAsync(query, subtitles);

View File

@@ -2,5 +2,5 @@ namespace SharedContracts.Python.RTN;
public interface IRankTorrentName
{
ParseTorrentTitleResponse Parse(string title);
ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true);
}
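
Parse gains an optional trashGarbage flag. The default of true keeps existing single-argument call sites compiling unchanged, while the collectors in this commit opt out explicitly. A tiny illustrative example (the wrapper and titles are not from the repo):

using SharedContracts.Python.RTN;

public static class ParseOverloadSketch
{
    public static void Demo(IRankTorrentName rtn)
    {
        var withTrashCheck = rtn.Parse("Movie.Title.2024.2160p.WEB-DL");           // trashGarbage defaults to true
        var withoutTrashCheck = rtn.Parse("Movie.Title.2024.2160p.WEB-DL", false); // explicit opt-out, as the collectors do
    }
}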

View File

@@ -13,14 +13,14 @@ public class RankTorrentName : IRankTorrentName
InitModules();
}
public ParseTorrentTitleResponse Parse(string title) =>
public ParseTorrentTitleResponse Parse(string title, bool trashGarbage = true) =>
_pythonEngineService.ExecutePythonOperationWithDefault(
() =>
{
var result = _rtn?.parse(title);
var result = _rtn?.parse(title, trashGarbage);
return ParseResult(result);
}, new ParseTorrentTitleResponse(false, null), nameof(Parse), throwOnErrors: false, logErrors: false);
private static ParseTorrentTitleResponse ParseResult(dynamic result)
{
if (result == null)
@@ -34,9 +34,18 @@ public class RankTorrentName : IRankTorrentName
{
return new(false, null);
}
var mediaType = result.GetAttr("type")?.As<string>();
if (string.IsNullOrEmpty(mediaType))
{
return new(false, null);
}
var response = JsonSerializer.Deserialize<RtnResponse>(json);
response.IsMovie = mediaType.Equals("movie", StringComparison.OrdinalIgnoreCase);
return new(true, response);
}

View File

@@ -76,23 +76,8 @@ public class RtnResponse
[JsonPropertyName("extended")]
public bool Extended { get; set; }
// [JsonPropertyName("is_show")]
// public bool IsTvShow { get; set; }
//
// [JsonPropertyName("is_movie")]
// public bool IsMovie { get; set; }
public bool IsMovie { get; set; }
public string ToJson() => this.AsJson();
public bool IsMovie => !TvRegexes.Any(regex => regex.IsMatch(RawTitle)) && Season?.Count == 0 && Episode?.Count == 0;
private static List<Regex> TvRegexes { get; set; } =
[
new(@"[se]\d\d", RegexOptions.IgnoreCase),
new(@"\b(tv|complete)\b", RegexOptions.IgnoreCase),
new(@"\b(saisons?|stages?|seasons?).?\d", RegexOptions.IgnoreCase),
new(@"[a-z]\s?\-\s?\d{2,4}\b", RegexOptions.IgnoreCase),
new(@"\d{2,4}\s?\-\s?\d{2,4}\b", RegexOptions.IgnoreCase),
];
}
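
IsMovie is now a plain property populated by RankTorrentName from RTN's "type" attribute, replacing the regex heuristic removed above. An illustrative consumer, with the caveat that how any given title is classified is entirely down to the rank-torrent-name package:

using SharedContracts.Python.RTN;

public static class TitleCategorySketch
{
    public static string Categorise(IRankTorrentName rtn, string title)
    {
        var parsed = rtn.Parse(title, trashGarbage: false);
        if (!parsed.Success || parsed.Response is null)
        {
            return "unparsed";
        }

        // Set in RankTorrentName.ParseResult from the Python result's "type" attribute.
        return parsed.Response.IsMovie ? "movie" : "show";
    }
}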

View File

@@ -82,11 +82,4 @@ public static class ServiceCollectionExtensions
x.AddConsumer<PerformIngestionConsumer>();
}
internal static IServiceCollection AddServiceConfiguration(this IServiceCollection services)
{
services.AddSingleton<IParseTorrentTitle, ParseTorrentTitle>();
return services;
}
}

View File

@@ -5,7 +5,6 @@ global using MassTransit;
global using MassTransit.Mediator;
global using Microsoft.AspNetCore.Builder;
global using Microsoft.Extensions.DependencyInjection;
global using PromKnight.ParseTorrentTitle;
global using SharedContracts.Configuration;
global using SharedContracts.Dapper;
global using SharedContracts.Extensions;

View File

@@ -10,7 +10,6 @@ builder.Host
builder.Services
.RegisterMassTransit()
.AddServiceConfiguration()
.AddDatabase();
var app = builder.Build();

View File

@@ -16,7 +16,6 @@
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="8.0.3" />
<PackageReference Include="Polly" Version="8.3.1" />
<PackageReference Include="PromKnight.ParseTorrentTitle" Version="1.0.4" />
<PackageReference Include="Serilog" Version="3.1.1" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />