Big rewrite: distributed consumers for ingestion/scraping (scalable), with a single producer written in C#.
Changed from page scraping to RSS XML scraping. Includes RealDebridManager hashlist decoding (requires a GitHub read-only PAT, as requests must be authenticated) — this allows ingestion of 200k+ entries in a few hours. Simplifies a lot of Torrentio's handling of the new data.
This commit is contained in:
14
src/node/addon/index.js
Normal file
14
src/node/addon/index.js
Normal file
@@ -0,0 +1,14 @@
|
||||
// Addon HTTP entry point: wires up the Express app (proxy trust, IP
// filtering, static assets, the serverless request handler) and starts
// listening, fetching the best-tracker list once the server is up.
import express from 'express';
import serverless from './serverless.js';
import { initBestTrackers } from './lib/magnetHelper.js';
import { ipFilter } from './lib/ipFilter.js';

// Single source of truth for the listen port. `||` (not `??`) is
// deliberate: env vars are strings, so an empty PORT falls back too.
const PORT = process.env.PORT || 7000;

const app = express();
app.enable('trust proxy'); // honor X-Forwarded-* headers when behind a proxy/load balancer
app.use(ipFilter); // filter requests before any other handling
app.use(express.static('static', { maxAge: '1y' })); // long-lived client caching for static assets
app.use((req, res, next) => serverless(req, res, next)); // everything else goes to the addon handler

app.listen(PORT, () => {
  initBestTrackers()
    .then(() => console.log(`Started addon at: http://localhost:${PORT}`))
    // Without this catch, a rejected initBestTrackers() would be an
    // unhandled promise rejection (fatal on Node >= 15). The server
    // keeps running; tracker init failure is logged, not fatal.
    .catch((err) => console.error('Failed to initialise best trackers:', err));
});
|
||||
Reference in New Issue
Block a user