From dff66d8fb28de75a06f1115d9bcd3caf1c3e0314 Mon Sep 17 00:00:00 2001
From: TheBeastLT
Date: Tue, 10 Mar 2020 14:45:45 +0100
Subject: [PATCH] adds cron job for scraping

---
 index.js | 23 +++++++++++------------
 1 file changed, 11 insertions(+), 12 deletions(-)

diff --git a/index.js b/index.js
index 468c0d0..a102135 100644
--- a/index.js
+++ b/index.js
@@ -1,6 +1,7 @@
 require('dotenv').config();
 const express = require("express");
 const server = express();
+const schedule = require('node-schedule');
 const { connect } = require('./lib/repository');
 const thepiratebayScraper = require('./scrapers/thepiratebay/thepiratebay_scraper');
 const horribleSubsScraper = require('./scrapers/horriblesubs/horriblesubs_scraper');
@@ -10,30 +11,28 @@ const rarbgScraper = require('./scrapers/rarbg/rarbg_scraper');
 const thepiratebayDumpScraper = require('./scrapers/thepiratebay/thepiratebay_dump_scraper');
 const thepiratebayUnofficialDumpScraper = require('./scrapers/thepiratebay/thepiratebay_unofficial_dump_scraper');
 
-const providers = [
-  // horribleSubsScraper,
+const PROVIDERS = [
+  horribleSubsScraper,
   rarbgScraper,
-  // thepiratebayScraper,
-  // kickassScraper,
-  // leetxScraper
+  thepiratebayScraper,
+  kickassScraper,
+  leetxScraper
 ];
+const SCRAPE_CRON = process.env.SCRAPE_CRON || '* 0/4 * * * *';
 
 async function scrape() {
-  return providers
-      .reduce((promise, scrapper) => promise.then(() => scrapper.scrape()), Promise.resolve());
+  return PROVIDERS
+      .reduce((promise, scrapper) => promise
+          .then(() => scrapper.scrape().catch(() => Promise.resolve())), Promise.resolve());
 }
 
 server.get('/', function (req, res) {
   res.send(200);
 });
 
-server.post('/scrape', function (req, res) {
-  scrape();
-  res.send(200);
-});
-
 server.listen(process.env.PORT || 7000, async function () {
   await connect();
+  schedule.scheduleJob(SCRAPE_CRON, () => scrape());
   console.log('Scraper started');
   scrape();
 });
\ No newline at end of file
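
For context, the sketch below shows the scheduling pattern this patch introduces, outside the patch itself. It is a minimal stand-alone illustration, not the project's code: the provider objects and the '0 0 */4 * * *' default are hypothetical stand-ins. node-schedule parses a 6-field cron string with a leading seconds field, so the patch's default of '* 0/4 * * * *' would match every second within every 4th minute, whereas '0 0 */4 * * *' fires once every 4 hours.

// Minimal sketch of the pattern added by this patch, assuming node-schedule is installed.
const schedule = require('node-schedule');

// Illustrative default only; the patch uses '* 0/4 * * * *' (seconds field first).
const SCRAPE_CRON = process.env.SCRAPE_CRON || '0 0 */4 * * *';

// Hypothetical scrapers standing in for the real provider modules.
const PROVIDERS = [
  { name: 'providerA', scrape: () => Promise.resolve(console.log('scraped A')) },
  { name: 'providerB', scrape: () => Promise.reject(new Error('boom')) }
];

// Run providers sequentially; a failing provider is caught and skipped
// so it cannot break the chain for the providers after it.
function scrape() {
  return PROVIDERS.reduce(
    (promise, provider) => promise.then(() =>
      provider.scrape().catch((err) => console.warn(`${provider.name} failed:`, err.message))),
    Promise.resolve());
}

// Schedule periodic runs and also kick off an initial run at startup, as the patch does.
schedule.scheduleJob(SCRAPE_CRON, () => scrape());
scrape();

The per-provider .catch in the reduce chain mirrors what the patch adds to scrape(): without it, one provider rejecting would abort every provider queued after it.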