Mirror of https://github.com/knightcrawler-stremio/knightcrawler.git, synced 2024-12-20 03:29:51 +00:00
Big rewrite: distributed consumers for ingestion/scraping (scalable), with a single producer written in C#.
Changed from page scraping to RSS XML scraping. Includes RealDebridManager hashlist decoding (requires a GitHub read-only PAT, as requests must be authenticated); this allows ingestion of 200k+ entries in a few hours. Simplifies a lot of torrentio to deal with the new data.
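The split works like this: the one C# producer scrapes the RSS feeds and publishes each ingested entry onto a RabbitMQ queue, and any number of consumers drain that queue, which is what makes the consumer side horizontally scalable. A minimal sketch of the publish side, assuming the RabbitMQ.Client package (v6 API); the payload type and its field names are illustrative placeholders, not the repository's actual types:

    // Publish sketch only (RabbitMQ.Client v6 API): the entry shape and
    // field names here are placeholders, not the repository's actual types.
    using System;
    using System.Text;
    using System.Text.Json;
    using RabbitMQ.Client;

    var factory = new ConnectionFactory
    {
        Uri = new Uri("amqp://guest:guest@rabbitmq:5672/?heartbeat=30"),
    };
    using var connection = factory.CreateConnection();
    using var channel = connection.CreateModel();

    // Both sides agree on this queue (QUEUE_NAME=ingested in the env files below).
    channel.QueueDeclare(queue: "ingested", durable: true,
                         exclusive: false, autoDelete: false);

    var entry = new { Title = "Example.Release.2160p", InfoHash = "0123abcd" };
    var body = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(entry));

    channel.BasicPublish(exchange: "", routingKey: "ingested",
                         basicProperties: null, body: body);

A durable queue plus per-message acknowledgements on the consumer side is what lets consumers be added or killed mid-ingest without losing entries.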
env/addon.env (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
+TZ=Europe/London
+DATABASE_URI=postgres://postgres:postgres@postgres/selfhostio
+MONGODB_URI=mongodb://mongo:mongo@mongodb/selfhostio?tls=false&authSource=admin
+DEBUG_MODE=false
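DATABASE_URI and MONGODB_URI point the addon at its two datastores. The addon derives from torrentio (a Node.js service), so the following C# reading of the same variables is illustrative only, assuming the Npgsql and MongoDB.Driver packages; Npgsql expects keyword=value connection strings rather than the URI form, so the postgres:// URI is translated by hand:

    // Illustrative only: shows what each addon.env URI targets.
    using System;
    using MongoDB.Driver;   // NuGet: MongoDB.Driver
    using Npgsql;           // NuGet: Npgsql

    var pgUri = new Uri(Environment.GetEnvironmentVariable("DATABASE_URI")
                        ?? "postgres://postgres:postgres@postgres/selfhostio");
    var userInfo = pgUri.UserInfo.Split(':');

    // Build a keyword=value string from the URI parts.
    var pgConnString = new NpgsqlConnectionStringBuilder
    {
        Host = pgUri.Host,
        Port = pgUri.Port > 0 ? pgUri.Port : 5432,
        Username = userInfo[0],
        Password = userInfo.Length > 1 ? userInfo[1] : "",
        Database = pgUri.AbsolutePath.TrimStart('/'),
    }.ConnectionString;

    using var pg = new NpgsqlConnection(pgConnString);
    pg.Open();

    // The MongoDB driver accepts the mongodb:// URI as-is.
    var mongo = new MongoClient(Environment.GetEnvironmentVariable("MONGODB_URI")
        ?? "mongodb://mongo:mongo@mongodb/selfhostio?tls=false&authSource=admin");
    var db = mongo.GetDatabase("selfhostio");

    Console.WriteLine($"postgres: {pg.State}, mongo: {db.DatabaseNamespace}");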
env/consumer.env (new file, vendored, 11 lines)
@@ -0,0 +1,11 @@
+TZ=Europe/London
+MONGODB_URI=mongodb://mongo:mongo@mongodb/selfhostio?tls=false&authSource=admin
+POSTGRES_DATABASE_URI=postgres://postgres:postgres@postgres/selfhostio
+RABBIT_URI=amqp://guest:guest@rabbitmq:5672/?heartbeat=30
+QUEUE_NAME=ingested
+JOB_CONCURRENCY=5
+JOBS_ENABLED=true
+ENABLE_SYNC=true
+MAX_SINGLE_TORRENT_CONNECTIONS=10
+TORRENT_TIMEOUT=30000
+UDP_TRACKERS_ENABLED=true
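On the consumer side, RABBIT_URI and QUEUE_NAME identify the queue and JOB_CONCURRENCY caps in-flight work per consumer. A hedged sketch of that loop with RabbitMQ.Client; the per-message handler is a placeholder, and the real consumers' torrent handling (controlled by the TORRENT_* and UDP_TRACKERS_ENABLED settings) is not reproduced here:

    // Consumer sketch: drain the "ingested" queue, using JOB_CONCURRENCY
    // as the prefetch limit. Message handling is a placeholder.
    using System;
    using System.Text;
    using RabbitMQ.Client;
    using RabbitMQ.Client.Events;

    var rabbitUri = Environment.GetEnvironmentVariable("RABBIT_URI")
                    ?? "amqp://guest:guest@rabbitmq:5672/?heartbeat=30";
    var queueName = Environment.GetEnvironmentVariable("QUEUE_NAME") ?? "ingested";
    var concurrency = ushort.Parse(
        Environment.GetEnvironmentVariable("JOB_CONCURRENCY") ?? "5");

    var factory = new ConnectionFactory { Uri = new Uri(rabbitUri) };
    using var connection = factory.CreateConnection();
    using var channel = connection.CreateModel();

    channel.QueueDeclare(queue: queueName, durable: true,
                         exclusive: false, autoDelete: false);
    // Limit unacknowledged deliveries per consumer to JOB_CONCURRENCY.
    channel.BasicQos(prefetchSize: 0, prefetchCount: concurrency, global: false);

    var consumer = new EventingBasicConsumer(channel);
    consumer.Received += (_, ea) =>
    {
        var json = Encoding.UTF8.GetString(ea.Body.ToArray());
        Console.WriteLine($"ingesting: {json}");   // real work would go here
        channel.BasicAck(ea.DeliveryTag, multiple: false);
    };
    channel.BasicConsume(queue: queueName, autoAck: false, consumer: consumer);

    Console.ReadLine(); // keep the process alive while messages arrive

Mapping JOB_CONCURRENCY onto BasicQos prefetch is an assumption about intent, but it is the standard RabbitMQ way to bound how many unacked messages a single consumer holds at once.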
env/producer.env (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
+ScrapeConfiguration__StorageConnectionString=host=postgres;username=postgres;password=postgres;database=selfhostio;
+RabbitMqConfiguration__Host=rabbitmq
+RabbitMqConfiguration__QueueName=ingested
+GithubSettings__PAT=
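The double underscores follow .NET's configuration convention: an environment variable named Section__Key surfaces as Section:Key in IConfiguration, which is why the C# producer's settings take this shape. A minimal binding sketch, assuming Microsoft.Extensions.Configuration plus its EnvironmentVariables and Binder packages; the POCO below mirrors only the names visible in this file:

    // "RabbitMqConfiguration__Host" becomes "RabbitMqConfiguration:Host".
    using System;
    using Microsoft.Extensions.Configuration;

    var config = new ConfigurationBuilder()
        .AddEnvironmentVariables()
        .Build();

    var rabbit = config.GetSection("RabbitMqConfiguration")
                       .Get<RabbitMqConfiguration>();
    var storage = config["ScrapeConfiguration:StorageConnectionString"];
    var pat = config["GithubSettings:PAT"]; // the read-only PAT the commit message mentions

    Console.WriteLine($"host: {rabbit?.Host}, queue: {rabbit?.QueueName}");

    // Property names mirror the keys in producer.env.
    public class RabbitMqConfiguration
    {
        public string? Host { get; set; }
        public string? QueueName { get; set; }
    }

GithubSettings__PAT is left blank in the committed file; each deployment supplies its own read-only token, per the commit message.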