[scraper] updates scrapers and unique index

TheBeastLT committed on 2020-03-14 22:25:51 +01:00
parent 1efaa0451c
commit 326a07b82e
13 changed files with 110 additions and 57 deletions

View File

@@ -96,10 +96,11 @@ async function getImdbId(info, type) {
});
}).catch(() => bing.web(`${name} ${year || ''} ${type} imdb`)
.then(results => results
- .map((result) => result.link)
+ .map(result => result.link)
.find(result => result.includes('imdb.com/title/')))
.then(result => result && result.match(/imdb\.com\/title\/(tt\d+)/))
- .then(match => match && match[1])));
+ .then(match => match && match[1])))
+ .then(imdbId => 'tt' + imdbId.replace(/tt0*([1-9][0-9]*)$/, '$1').padStart(7, '0'));
}
async function getKitsuId(info) {

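The appended `.then` step normalizes whichever id the lookup (or the Bing fallback) resolves to into the canonical zero-padded form. A minimal sketch of that transformation, pulled out into a hypothetical helper for clarity; the helper name and the sample ids are illustrative and not part of the commit:

// Hypothetical helper illustrating the normalization the new `.then` performs:
// strip the 'tt' prefix and any leading zeros, then left-pad the number back
// to at least seven digits before re-attaching 'tt'.
function normalizeImdbId(imdbId) {
  return 'tt' + imdbId.replace(/tt0*([1-9][0-9]*)$/, '$1').padStart(7, '0');
}

console.log(normalizeImdbId('tt111161'));   // 'tt0111161'
console.log(normalizeImdbId('tt0000123'));  // 'tt0000123'
console.log(normalizeImdbId('tt12345678')); // 'tt12345678' (longer ids pass through unchanged)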
View File

@@ -1,9 +1,14 @@
- const { Sequelize } = require('sequelize');
+ const { Sequelize, fn, col } = require('sequelize');
const Op = Sequelize.Op;
const DATABASE_URI = process.env.DATABASE_URI;
- const database = new Sequelize(DATABASE_URI, { logging: false });
+ const database = new Sequelize(
+     DATABASE_URI,
+     {
+       logging: false
+     }
+ );
const Provider = database.define('provider', {
name: { type: Sequelize.STRING(32), primaryKey: true },
@@ -47,6 +52,19 @@ const File = database.define('file',
},
{
indexes: [
+ {
+   unique: true,
+   name: 'files_unique_file_constraint',
+   fields: [
+     col('infoHash'),
+     fn('COALESCE', (col('fileIndex')), -1),
+     fn('COALESCE', (col('imdbId')), 'null'),
+     fn('COALESCE', (col('imdbSeason')), -1),
+     fn('COALESCE', (col('imdbEpisode')), -1),
+     fn('COALESCE', (col('kitsuId')), -1),
+     fn('COALESCE', (col('kitsuEpisode')), -1)
+   ]
+ },
{ unique: false, fields: ['imdbId', 'imdbSeason', 'imdbEpisode'] },
{ unique: false, fields: ['kitsuId', 'kitsuEpisode'] }
]
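The new functional unique index wraps the nullable columns in COALESCE because Postgres treats NULLs as distinct in a unique index, so a plain multi-column constraint would accept duplicate rows that differ only in their NULL fields; coalescing to sentinel values (-1, 'null') makes such rows collide. A rough sketch of the behaviour the expression index is meant to enforce, assuming the File model defined above is in scope and that any other required columns are omitted here for brevity (the hash and ids are made up):

// Illustrative only: with the expression index in place, inserting the same
// logical file twice should fail on 'files_unique_file_constraint' instead of
// silently creating a duplicate row.
async function demonstrateConstraint() {
  const entry = {
    infoHash: 'abcdef0123456789abcdef0123456789abcdef01', // made-up hash
    fileIndex: null,   // NULL columns previously made otherwise-equal rows "distinct"
    imdbId: 'tt0111161',
    imdbSeason: null,
    imdbEpisode: null,
    kitsuId: null,
    kitsuEpisode: null
  };

  await File.create(entry);
  await File.create(entry)
      .catch(error => console.log('duplicate rejected:', error.name));
}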
@@ -63,7 +81,11 @@ const FailedImdbTorrent = database.define('failed_imdb_torrent', {
});
function connect() {
- return database.sync({ alter: true });
+ return database.sync({ alter: true })
+     .catch(error => {
+       console.error('Failed syncing database: ', error);
+       throw error;
+     });
}
function getProvider(provider) {

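Because connect() now logs and rethrows instead of letting a failed sync({ alter: true }) surface only as an unhandled rejection, a caller can abort explicitly. A possible startup wiring, hypothetical and not part of this diff:

// Hypothetical caller: stop the scraper rather than run against a schema
// that failed to sync.
connect()
    .then(() => console.log('database ready'))
    .catch(() => process.exit(1));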
View File

@@ -111,7 +111,7 @@ async function filesFromTorrentStream(torrent) {
function filterVideos(files) {
return files.filter((file) => {
const match = file.path.match(/\.(\w{2,4})$/);
- return match && EXTENSIONS.includes(match[1]);
+ return match && EXTENSIONS.includes(match[1].toLowerCase());
});
}

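Lowercasing the captured extension lets mixed-case file names pass the video filter. A quick illustration with made-up paths, assuming 'mkv' is in EXTENSIONS and 'jpg' is not:

// Illustrative input: only the .MKV and .mkv entries should survive the filter now.
const sample = [
  { path: 'Show.S01E01.MKV' },
  { path: 'cover.JPG' },
  { path: 'episode.mkv' }
];
console.log(filterVideos(sample).map(file => file.path));
// expected: [ 'Show.S01E01.MKV', 'episode.mkv' ]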
View File

@@ -62,8 +62,9 @@ async function parseTorrentFiles(torrent) {
}
async function getSeriesFiles(torrent, parsedTorrentName) {
- if ((parsedTorrentName.episode && (!parsedTorrentName.seasons || parsedTorrentName.seasons.length <= 1)) ||
-     (!parsedTorrentName.episodes && parsedTorrentName.date)) {
+ if (!parsedTorrentName.complete && !parsedTorrentName.hasMovies &&
+     ((parsedTorrentName.episode && (!parsedTorrentName.seasons || parsedTorrentName.seasons.length <= 1)) ||
+     (!parsedTorrentName.episodes && parsedTorrentName.date))) {
return [{
name: torrent.title,
path: torrent.title,