Mirror of https://github.com/iptv-org/database.git (synced 2025-05-09 19:20:01 -04:00)

Commit 075c53143e (parent 77680e2dc9): Update scripts

14 changed files with 520 additions and 132 deletions
@@ -3,8 +3,9 @@ import { DATA_DIR } from '../constants'
 import schemesData from '../schemes'
 import { program } from 'commander'
 import Joi from 'joi'
-import { CSVParser, IDCreator } from '../core'
+import { CSVParser } from '../core'
 import chalk from 'chalk'
+import { createChannelId } from '../utils'

 program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
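Note on the import change above: channel IDs are now produced by a standalone createChannelId helper from '../utils' instead of the IDCreator class from '../core'. The helper's implementation is not part of this diff; the sketch below only illustrates the kind of normalization such a helper presumably performs (the body is an assumption, not the repository's actual code).

// Hypothetical sketch only; the real createChannelId lives in '../utils' and is not shown in this diff.
// Assumed behavior: keep only letters and digits from the name, then append the lowercased country code.
export function createChannelId(name: string, country: string): string {
  const normalizedName = name.replace(/[^a-z\d]+/gi, '')
  return `${normalizedName}.${country}`.toLowerCase()
}

// e.g. createChannelId('BBC One', 'UK') -> 'bbcone.uk'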
@@ -42,11 +43,15 @@ async function main() {

     let grouped
     switch (filename) {
+      case 'feeds':
+        grouped = data.keyBy(item => item.channel + item.id)
+        break
       case 'blocklist':
         grouped = data.keyBy(item => item.channel + item.ref)
         break
       case 'categories':
       case 'channels':
+      case 'timezones':
         grouped = data.keyBy(item => item.id)
         break
       default:
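The switch above decides how parsed rows are grouped for later lookups: feeds by channel plus feed id, blocklist entries by channel plus ref, and the remaining files by their id. The Collection API from '../core' is not shown in this diff, so the sketch below only illustrates the keyBy semantics the code appears to rely on, with a plain Map standing in for the repository's own types.

// Assumed keyBy semantics: build a lookup table keyed by whatever the callback returns.
function keyBy<T>(items: T[], getKey: (item: T) => string): Map<string, T> {
  const lookup = new Map<string, T>()
  for (const item of items) {
    lookup.set(getKey(item), item) // a later row with the same key overwrites the earlier one
  }
  return lookup
}

// e.g. keyBy(feeds, item => item.channel + item.id) lets later checks resolve a channel/feed pair in O(1).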
@@ -91,6 +96,17 @@ async function main() {
           )
         }
         break
+      case 'feeds':
+        fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['channel', 'id']))
+        fileErrors = fileErrors.concat(validateMainFeeds(rowsCopy))
+        for (const [i, row] of rowsCopy.entries()) {
+          fileErrors = fileErrors.concat(validateChannel(row.channel, i))
+          fileErrors = fileErrors.concat(validateTimezones(row, i))
+          fileErrors = fileErrors.concat(
+            checkValue(i, row, 'id', 'replaced_by', buffer.get('channels'))
+          )
+        }
+        break
       case 'blocklist':
         fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, ['channel', 'ref']))
         for (const [i, row] of rowsCopy.entries()) {
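The new 'feeds' branch layers several checks on each file: findDuplicatesBy over (channel, id), validateMainFeeds over the whole file, and per-row validateChannel, validateTimezones, and a checkValue lookup against the loaded channels. A rough illustration of what the first two would flag (the rows below are invented; only the column names come from the diff):

// Hypothetical feeds rows; values are made up for illustration.
const rows = [
  { channel: 'BBCOne.uk', id: 'SD', is_main: true },  // CSV line 2
  { channel: 'BBCOne.uk', id: 'SD', is_main: false }, // CSV line 3: duplicate (channel, id)
  { channel: 'BBCOne.uk', id: 'HD', is_main: true }   // CSV line 4: second main feed for the channel
]

// findDuplicatesBy(rows, ['channel', 'id']) -> error for line 3
// validateMainFeeds(rows)                   -> error for line 4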
@@ -201,7 +217,7 @@ function findDuplicatesBy(rows: { [key: string]: string }[], keys: string[]) {
   const buffer = new Dictionary()

   rows.forEach((row, i) => {
-    const normId = keys.map(key => row[key].toLowerCase()).join()
+    const normId = keys.map(key => row[key].toString().toLowerCase()).join()
     if (buffer.has(normId)) {
       const fieldsList = keys.map(key => `${key} "${row[key]}"`).join(' and ')
       errors.push({
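The only change in findDuplicatesBy is the added .toString(): a parsed cell is not guaranteed to be a string (is_main, for instance, plausibly comes back as a boolean), and calling .toLowerCase() directly on a non-string throws. A minimal illustration under that assumption:

// Assumes the CSV parser can yield non-string cells such as booleans or numbers.
const row: { [key: string]: any } = { channel: 'BBCOne.uk', is_main: true }

// row['is_main'].toLowerCase()            // would throw: toLowerCase is not a function
row['is_main'].toString().toLowerCase()    // 'true'; works for strings, booleans and numbers alike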
@@ -216,10 +232,31 @@ function findDuplicatesBy(rows: { [key: string]: string }[], keys: string[]) {
   return errors
 }

+function validateMainFeeds(rows: { [key: string]: string }[]) {
+  const errors = new Collection()
+  const buffer = new Dictionary()
+
+  rows.forEach((row, i) => {
+    const normId = `${row.channel}${row.is_main}`
+    if (buffer.has(normId)) {
+      errors.push({
+        line: i + 2,
+        message: `entry with the channel "${row.channel}" and is_main "true" already exists`
+      })
+    }
+
+    if (row.is_main) {
+      buffer.set(normId, true)
+    }
+  })
+
+  return errors
+}
+
 function validateChannelId(row: { [key: string]: string }, i: number) {
   const errors = new Collection()

-  const expectedId = new IDCreator().create(row.name, row.country)
+  const expectedId = createChannelId(row.name, row.country)

   if (expectedId !== row.id) {
     errors.push({
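validateMainFeeds keys each row by channel plus is_main but only stores rows whose is_main is truthy, so the error fires exactly when a second main feed appears for a channel that already has one; rows without a main flag never enter the buffer. A short walk-through with invented rows:

// Hypothetical walk-through (rows invented; reported lines are i + 2, i.e. zero-based index plus the CSV header).
// row 0: { channel: 'BBCOne.uk', is_main: true }  -> normId 'BBCOne.uktrue', not in buffer yet, stored
// row 1: { channel: 'BBCOne.uk', is_main: false } -> normId 'BBCOne.ukfalse', never stored, never flagged
// row 2: { channel: 'BBCOne.uk', is_main: true }  -> normId 'BBCOne.uktrue', already stored -> error for line 4

In validateChannelId, the expected id is now built by the new createChannelId helper instead of the removed IDCreator class; the comparison against row.id itself is unchanged.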
@@ -254,6 +291,22 @@ function validateChannelBroadcastArea(row: { [key: string]: string[] }, i: numbe
   return errors
 }

+function validateTimezones(row: { [key: string]: string[] }, i: number) {
+  const errors = new Collection()
+  const timezones = buffer.get('timezones')
+
+  row.timezones.forEach((timezone: string) => {
+    if (timezones.missing(timezone)) {
+      errors.push({
+        line: i + 2,
+        message: `"${row.channel}@${row.id}" has the wrong timezone "${timezone}"`
+      })
+    }
+  })
+
+  return errors
+}
+
 function handleError(message: string) {
   logger.error(chalk.red(message))
   process.exit(1)
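validateTimezones checks every timezone listed on a feed against the timezones data loaded earlier into the shared buffer; the reported line is i + 2, presumably to account for the zero-based row index and the CSV header row. The Dictionary/Collection API from '../core' is not shown here, so the sketch below assumes .missing(id) is simply the negation of .has(id).

// Assumed lookup semantics; a plain Map stands in for buffer.get('timezones').
const timezones = new Map<string, unknown>([
  ['Europe/London', {}],
  ['America/New_York', {}]
])
const missing = (id: string) => !timezones.has(id)

missing('Europe/London') // false -> no error
missing('Europe/Lonodn') // true  -> reported as having the wrong timezone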