mirror of https://github.com/iptv-org/epg.git (synced 2025-05-09 08:30:06 -04:00)

Commit 5dd131e2d3 (parent 37664b49b9): Update scripts
15 changed files with 274 additions and 204 deletions
@@ -2,10 +2,11 @@ import { Logger, Storage, Collection } from '@freearhey/core'
 import { ChannelsParser } from '../../core'
 import path from 'path'
 import { SITES_DIR, API_DIR } from '../../constants'
-import { Channel } from 'epg-grabber'
+import epgGrabber from 'epg-grabber'

 type OutputItem = {
   channel: string | null
+  feed: string | null
   site: string
   site_id: string
   site_name: string
@@ -31,9 +32,13 @@ async function main() {

   logger.info(` found ${parsedChannels.count()} channel(s)`)

-  const output = parsedChannels.map((channel: Channel): OutputItem => {
+  const output = parsedChannels.map((channel: epgGrabber.Channel): OutputItem => {
+    const xmltv_id = channel.xmltv_id || ''
+    const [channelId, feedId] = xmltv_id.split('@')
+
     return {
-      channel: channel.xmltv_id || null,
+      channel: channelId || null,
+      feed: feedId || null,
       site: channel.site || '',
       site_id: channel.site_id || '',
       site_name: channel.name,
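The reworked map splits xmltv_id on "@" into a channel part and a feed part before building each OutputItem. A minimal TypeScript sketch of that split, with a hypothetical id:

// Hypothetical id: the part before "@" is the channel, the part after it the feed.
const xmltv_id = 'Channel.us@SD'
const [channelId, feedId] = xmltv_id.split('@')

const item = {
  channel: channelId || null, // 'Channel.us'
  feed: feedId || null // 'SD'; becomes null when the id has no "@" suffix
}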
@@ -7,6 +7,7 @@ async function main() {

   const requests = [
     client.download('channels.json'),
+    client.download('feeds.json'),
     client.download('countries.json'),
     client.download('regions.json'),
     client.download('subdivisions.json')
scripts/commands/channels/.gitignore (vendored)
@@ -1 +0,0 @@
-/replace.ts
@@ -1,12 +1,16 @@
+import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
+import { select, input } from '@inquirer/prompts'
+import { ChannelsParser, XML } from '../../core'
+import { Channel, Feed } from '../../models'
 import { DATA_DIR } from '../../constants'
-import { Storage, Collection, Logger } from '@freearhey/core'
-import { ChannelsParser, XML, ApiChannel } from '../../core'
-import { Channel } from 'epg-grabber'
 import nodeCleanup from 'node-cleanup'
-import { program } from 'commander'
-import inquirer, { QuestionCollection } from 'inquirer'
-import Fuse from 'fuse.js'
+import epgGrabber from 'epg-grabber'
+import { Command } from 'commander'
 import readline from 'readline'
+import Fuse from 'fuse.js'

+type ChoiceValue = { type: string; value?: Feed | Channel }
+type Choice = { name: string; short?: string; value: ChoiceValue }
+
 if (process.platform === 'win32') {
   readline
@@ -19,105 +23,159 @@ if (process.platform === 'win32') {
   })
 }

+const program = new Command()
+
 program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)

 const filepath = program.args[0]

 const logger = new Logger()
 const storage = new Storage()
-let channels = new Collection()
+let parsedChannels = new Collection()

-async function main() {
+main(filepath)
+
+nodeCleanup(() => {
+  save(filepath)
+})
+
+export default async function main(filepath: string) {
   if (!(await storage.exists(filepath))) {
     throw new Error(`File "${filepath}" does not exists`)
   }

   const parser = new ChannelsParser({ storage })
-  channels = await parser.parse(filepath)
+  parsedChannels = await parser.parse(filepath)

   const dataStorage = new Storage(DATA_DIR)
-  const channelsContent = await dataStorage.json('channels.json')
-  const searchIndex = new Fuse(channelsContent, { keys: ['name', 'alt_names'], threshold: 0.4 })
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data => new Feed(data))
+  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)

-  for (const channel of channels.all()) {
-    if (channel.xmltv_id) continue
-    const question: QuestionCollection = {
-      name: 'option',
-      message: `Select xmltv_id for "${channel.name}" (${channel.site_id}):`,
-      type: 'list',
-      choices: getOptions(searchIndex, channel),
-      pageSize: 10
-    }
-
-    await inquirer.prompt(question).then(async selected => {
-      switch (selected.option) {
-        case 'Type...':
-          const input = await getInput(channel)
-          channel.xmltv_id = input.xmltv_id
-          break
-        case 'Skip':
-          channel.xmltv_id = '-'
-          break
-        default:
-          const [, xmltv_id] = selected.option
-            .replace(/ \[.*\]/, '')
-            .split('|')
-            .map((i: string) => i.trim())
-          channel.xmltv_id = xmltv_id
-          break
-      }
-    })
+  const searchIndex: Fuse<Channel> = new Fuse(channels.all(), {
+    keys: ['name', 'alt_names'],
+    threshold: 0.4
+  })

+  for (const channel of parsedChannels.all()) {
+    if (channel.xmltv_id) continue
+    try {
+      channel.xmltv_id = await selectChannel(channel, searchIndex, feedsGroupedByChannelId)
+    } catch {
+      break
+    }
   }

-  channels.forEach((channel: Channel) => {
+  parsedChannels.forEach((channel: epgGrabber.Channel) => {
     if (channel.xmltv_id === '-') {
       channel.xmltv_id = ''
     }
   })
 }

-main()
+async function selectChannel(
+  channel: epgGrabber.Channel,
+  searchIndex: Fuse<Channel>,
+  feedsGroupedByChannelId: Dictionary
+): Promise<string> {
+  const similarChannels = searchIndex
+    .search(channel.name)
+    .map((result: { item: Channel }) => result.item)

-function save() {
+  const selected: ChoiceValue = await select({
+    message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
+    choices: getChannelChoises(new Collection(similarChannels)),
+    pageSize: 10
+  })
+
+  switch (selected.type) {
+    case 'skip':
+      return '-'
+    case 'type': {
+      const typedChannelId = await input({ message: ' Channel ID:' })
+      const typedFeedId = await input({ message: ' Feed ID:', default: 'SD' })
+      return [typedChannelId, typedFeedId].join('@')
+    }
+    case 'channel': {
+      const selectedChannel = selected.value
+      if (!selectedChannel) return ''
+      const selectedFeedId = await selectFeed(selectedChannel.id, feedsGroupedByChannelId)
+      return [selectedChannel.id, selectedFeedId].join('@')
+    }
+  }
+
+  return ''
+}
+
+async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
+  const channelFeeds = feedsGroupedByChannelId.get(channelId) || []
+  if (channelFeeds.length <= 1) return ''
+
+  const selected: ChoiceValue = await select({
+    message: `Select feed ID for "${channelId}":`,
+    choices: getFeedChoises(channelFeeds),
+    pageSize: 10
+  })
+
+  switch (selected.type) {
+    case 'type':
+      return await input({ message: ' Feed ID:' })
+    case 'feed':
+      const selectedFeed = selected.value
+      if (!selectedFeed) return ''
+      return selectedFeed.id
+  }
+
+  return ''
+}
+
+function getChannelChoises(channels: Collection): Choice[] {
+  const choises: Choice[] = []
+
+  channels.forEach((channel: Channel) => {
+    const names = [channel.name, ...channel.altNames.all()].join(', ')
+
+    choises.push({
+      value: {
+        type: 'channel',
+        value: channel
+      },
+      name: `${channel.id} (${names})`,
+      short: `${channel.id}`
+    })
+  })
+
+  choises.push({ name: 'Type...', value: { type: 'type' } })
+  choises.push({ name: 'Skip', value: { type: 'skip' } })
+
+  return choises
+}
+
+function getFeedChoises(feeds: Collection): Choice[] {
+  const choises: Choice[] = []
+
+  feeds.forEach((feed: Feed) => {
+    let name = `${feed.id} (${feed.name})`
+    if (feed.isMain) name += ' [main]'
+
+    choises.push({
+      value: {
+        type: 'feed',
+        value: feed
+      },
+      name,
+      short: feed.id
+    })
+  })
+
+  choises.push({ name: 'Type...', value: { type: 'type' } })
+
+  return choises
+}
+
|
function save(filepath: string) {
|
||||||
if (!storage.existsSync(filepath)) return
|
if (!storage.existsSync(filepath)) return
|
||||||
|
const xml = new XML(parsedChannels)
|
||||||
const xml = new XML(channels)
|
|
||||||
|
|
||||||
storage.saveSync(filepath, xml.toString())
|
storage.saveSync(filepath, xml.toString())
|
||||||
|
|
||||||
logger.info(`\nFile '${filepath}' successfully saved`)
|
logger.info(`\nFile '${filepath}' successfully saved`)
|
||||||
}
|
}
|
||||||
|
|
||||||
nodeCleanup(() => {
|
|
||||||
save()
|
|
||||||
})
|
|
||||||
|
|
||||||
async function getInput(channel: Channel) {
|
|
||||||
const name = channel.name.trim()
|
|
||||||
const input = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
name: 'xmltv_id',
|
|
||||||
message: ' xmltv_id:',
|
|
||||||
type: 'input'
|
|
||||||
}
|
|
||||||
])
|
|
||||||
|
|
||||||
return { name, xmltv_id: input['xmltv_id'] }
|
|
||||||
}
|
|
||||||
|
|
||||||
function getOptions(index, channel: Channel) {
|
|
||||||
const similar = index.search(channel.name).map(result => new ApiChannel(result.item))
|
|
||||||
|
|
||||||
const variants = new Collection()
|
|
||||||
similar.forEach((_channel: ApiChannel) => {
|
|
||||||
const altNames = _channel.altNames.notEmpty() ? ` (${_channel.altNames.join(',')})` : ''
|
|
||||||
const closed = _channel.closed ? ` [closed:${_channel.closed}]` : ''
|
|
||||||
const replacedBy = _channel.replacedBy ? `[replaced_by:${_channel.replacedBy}]` : ''
|
|
||||||
|
|
||||||
variants.add(`${_channel.name}${altNames} | ${_channel.id}${closed}${replacedBy}`)
|
|
||||||
})
|
|
||||||
variants.add('Type...')
|
|
||||||
variants.add('Skip')
|
|
||||||
|
|
||||||
return variants.all()
|
|
||||||
}
|
|
||||||
|
|
|
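The interactive flow above now uses select and input from @inquirer/prompts instead of inquirer's QuestionCollection. A minimal sketch of that prompt pattern under the same Choice/ChoiceValue idea (channel data is hypothetical):

import { select, input } from '@inquirer/prompts'

type ChoiceValue = { type: string; value?: string }

async function pickChannelId(): Promise<string> {
  const selected: ChoiceValue = await select({
    message: 'Select channel ID for "Channel" (channel-us):',
    choices: [
      { name: 'Channel.us (Channel)', value: { type: 'channel', value: 'Channel.us' } },
      { name: 'Type...', value: { type: 'type' } },
      { name: 'Skip', value: { type: 'skip' } }
    ],
    pageSize: 10
  })

  if (selected.type === 'skip') return '-'
  if (selected.type === 'type') return await input({ message: ' Channel ID:' })
  return selected.value || ''
}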
@@ -1,10 +1,11 @@
 import { Storage, Collection, Dictionary, File } from '@freearhey/core'
-import { ChannelsParser, ApiChannel } from '../../core'
+import { ChannelsParser } from '../../core'
+import { Channel } from '../../models'
 import { program } from 'commander'
 import chalk from 'chalk'
 import langs from 'langs'
 import { DATA_DIR } from '../../constants'
-import { Channel } from 'epg-grabber'
+import epgGrabber from 'epg-grabber'

 program.argument('[filepath]', 'Path to *.channels.xml files to validate').parse(process.argv)

@@ -21,8 +22,9 @@ async function main() {
   const parser = new ChannelsParser({ storage: new Storage() })

   const dataStorage = new Storage(DATA_DIR)
-  const channelsContent = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsContent).map(data => new ApiChannel(data))
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.groupBy((channel: Channel) => channel.id)

   let totalFiles = 0
   let totalErrors = 0
@@ -37,7 +39,7 @@ async function main() {

     const bufferBySiteId = new Dictionary()
     const errors: ValidationError[] = []
-    parsedChannels.forEach((channel: Channel) => {
+    parsedChannels.forEach((channel: epgGrabber.Channel) => {
       const bufferId: string = channel.site_id
       if (bufferBySiteId.missing(bufferId)) {
         bufferBySiteId.set(bufferId, true)
@@ -52,10 +54,8 @@ async function main() {
       }

       if (!channel.xmltv_id) return
-      const foundChannel = channels.first(
-        (_channel: ApiChannel) => _channel.id === channel.xmltv_id
-      )
+      const [channelId] = channel.xmltv_id.split('@')
+      const foundChannel = channelsGroupedById.get(channelId)
       if (!foundChannel) {
         errors.push({ type: 'wrong_xmltv_id', ...channel })
         totalErrors++
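Validation now resolves an xmltv_id by splitting off the channel part and looking it up in the pre-built channelsGroupedById dictionary rather than scanning the whole collection. A minimal sketch of that lookup using the same Collection/groupBy calls (sample ids are hypothetical):

import { Collection } from '@freearhey/core'

// Sample data standing in for channels.json entries.
const channels = new Collection([{ id: 'Channel.us' }, { id: 'Other.us' }])
const channelsGroupedById = channels.groupBy((channel: { id: string }) => channel.id)

const [channelId] = 'Channel.us@SD'.split('@') // drop the feed suffix before the lookup
const foundChannel = channelsGroupedById.get(channelId)
if (!foundChannel) {
  // would be reported as a wrong_xmltv_id error, as in the hunk above
}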
@@ -1,8 +1,8 @@
 import { Logger, Storage } from '@freearhey/core'
-import { program } from 'commander'
 import { SITES_DIR } from '../../constants'
-import fs from 'fs-extra'
 import { pathToFileURL } from 'node:url'
+import { program } from 'commander'
+import fs from 'fs-extra'

 program.argument('<site>', 'Domain name of the site').parse(process.argv)

@@ -1,8 +1,8 @@
-import { Channel } from 'epg-grabber'
-import { Logger, Storage, Collection } from '@freearhey/core'
 import { IssueLoader, HTMLTable, ChannelsParser } from '../../core'
-import { Issue, Site } from '../../models'
+import { Logger, Storage, Collection } from '@freearhey/core'
 import { SITES_DIR, ROOT_DIR } from '../../constants'
+import { Issue, Site } from '../../models'
+import { Channel } from 'epg-grabber'

 async function main() {
   const logger = new Logger({ disabled: true })
@@ -15,11 +15,17 @@ async function main() {
   const folders = await sitesStorage.list('*/')

   logger.info('loading issues...')
-  const issues = await loadIssues(loader)
+  const issues = await loader.load()

   logger.info('putting the data together...')
+  const brokenGuideReports = issues.filter(issue =>
+    issue.labels.find((label: string) => label === 'broken guide')
+  )
   for (const domain of folders) {
-    const filteredIssues = issues.filter((issue: Issue) => domain === issue.data.get('site'))
+    const filteredIssues = brokenGuideReports.filter(
+      (issue: Issue) => domain === issue.data.get('site')
+    )

     const site = new Site({
       domain,
       issues: filteredIssues
@@ -62,10 +68,3 @@ async function main() {
 }

 main()
-
-async function loadIssues(loader: IssueLoader) {
-  const issuesWithStatusWarning = await loader.load({ labels: ['broken guide', 'status:warning'] })
-  const issuesWithStatusDown = await loader.load({ labels: ['broken guide', 'status:down'] })
-
-  return issuesWithStatusWarning.concat(issuesWithStatusDown)
-}
@@ -1,79 +0,0 @@
-import { Collection } from '@freearhey/core'
-
-type ApiChannelProps = {
-  id: string
-  name: string
-  alt_names: string[]
-  network: string
-  owners: string[]
-  country: string
-  subdivision: string
-  city: string
-  broadcast_area: string[]
-  languages: string[]
-  categories: string[]
-  is_nsfw: boolean
-  launched: string
-  closed: string
-  replaced_by: string
-  website: string
-  logo: string
-}
-
-export class ApiChannel {
-  id: string
-  name: string
-  altNames: Collection
-  network: string
-  owners: Collection
-  country: string
-  subdivision: string
-  city: string
-  broadcastArea: Collection
-  languages: Collection
-  categories: Collection
-  isNSFW: boolean
-  launched: string
-  closed: string
-  replacedBy: string
-  website: string
-  logo: string
-
-  constructor({
-    id,
-    name,
-    alt_names,
-    network,
-    owners,
-    country,
-    subdivision,
-    city,
-    broadcast_area,
-    languages,
-    categories,
-    is_nsfw,
-    launched,
-    closed,
-    replaced_by,
-    website,
-    logo
-  }: ApiChannelProps) {
-    this.id = id
-    this.name = name
-    this.altNames = new Collection(alt_names)
-    this.network = network
-    this.owners = new Collection(owners)
-    this.country = country
-    this.subdivision = subdivision
-    this.city = city
-    this.broadcastArea = new Collection(broadcast_area)
-    this.languages = new Collection(languages)
-    this.categories = new Collection(categories)
-    this.isNSFW = is_nsfw
-    this.launched = launched
-    this.closed = closed
-    this.replacedBy = replaced_by
-    this.website = website
-    this.logo = logo
-  }
-}
@@ -7,7 +7,6 @@ export * from './job'
 export * from './queue'
 export * from './guideManager'
 export * from './guide'
-export * from './apiChannel'
 export * from './apiClient'
 export * from './queueCreator'
 export * from './issueLoader'
@@ -1,27 +1,22 @@
-import { Collection } from '@freearhey/core'
 import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
 import { paginateRest } from '@octokit/plugin-paginate-rest'
+import { TESTING, OWNER, REPO } from '../constants'
+import { Collection } from '@freearhey/core'
 import { Octokit } from '@octokit/core'
 import { IssueParser } from './'
-import { TESTING, OWNER, REPO } from '../constants'

 const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
 const octokit = new CustomOctokit()

 export class IssueLoader {
-  async load({ labels }: { labels: string[] | string }) {
-    labels = Array.isArray(labels) ? labels.join(',') : labels
+  async load(props?: { labels: string[] | string }) {
+    let labels = ''
+    if (props && props.labels) {
+      labels = Array.isArray(props.labels) ? props.labels.join(',') : props.labels
+    }
     let issues: object[] = []
     if (TESTING) {
-      switch (labels) {
-        case 'broken guide,status:warning':
-          issues = (await import('../../tests/__data__/input/issues/broken_guide_warning.mjs'))
-            .default
-          break
-        case 'broken guide,status:down':
-          issues = (await import('../../tests/__data__/input/issues/broken_guide_down.mjs')).default
-          break
-      }
+      issues = (await import('../../tests/__data__/input/sites_update/issues.mjs')).default
     } else {
       issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
         owner: OWNER,
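With the reworked signature, labels are optional: the sites update command above now calls load() with no arguments and filters by label afterwards, while callers can still pass labels, which are joined with commas before the GitHub query. A minimal usage sketch (label names are taken from the old call sites):

import { IssueLoader } from '../../core' // path as seen from a command script

const loader = new IssueLoader()

// New style: fetch everything, then filter in the caller.
const allIssues = await loader.load()

// Still supported: ['broken guide', 'status:warning'] is joined to 'broken guide,status:warning'.
const warnings = await loader.load({ labels: ['broken guide', 'status:warning'] })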
@@ -1,9 +1,10 @@
 import { Storage, Collection, DateTime, Logger } from '@freearhey/core'
-import { ChannelsParser, ConfigLoader, ApiChannel, Queue } from './'
+import { ChannelsParser, ConfigLoader, Queue } from './'
 import { SITES_DIR, DATA_DIR } from '../constants'
 import { SiteConfig } from 'epg-grabber'
 import path from 'path'
 import { GrabOptions } from '../commands/epg/grab'
+import { Channel } from '../models'

 type QueueCreatorProps = {
   logger: Logger
@@ -32,7 +33,7 @@ export class QueueCreator {

   async create(): Promise<Queue> {
     const channelsContent = await this.dataStorage.json('channels.json')
-    const channels = new Collection(channelsContent).map(data => new ApiChannel(data))
+    const channels = new Collection(channelsContent).map(data => new Channel(data))

     const queue = new Queue()
     for (const channel of this.parsedChannels.all()) {
@@ -44,8 +45,8 @@ export class QueueCreator {

       if (channel.xmltv_id) {
         if (!channel.icon) {
-          const found: ApiChannel = channels.first(
-            (_channel: ApiChannel) => _channel.id === channel.xmltv_id
+          const found: Channel = channels.first(
+            (_channel: Channel) => _channel.id === channel.xmltv_id
           )

           if (found) {
scripts/models/channel.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
+import { Collection } from '@freearhey/core'
+
+type ChannelData = {
+  id: string
+  name: string
+  alt_names: string[]
+  network: string
+  owners: Collection
+  country: string
+  subdivision: string
+  city: string
+  categories: Collection
+  is_nsfw: boolean
+  launched: string
+  closed: string
+  replaced_by: string
+  website: string
+  logo: string
+}
+
+export class Channel {
+  id: string
+  name: string
+  altNames: Collection
+  network?: string
+  owners: Collection
+  countryCode: string
+  subdivisionCode?: string
+  cityName?: string
+  categoryIds: Collection
+  categories?: Collection
+  isNSFW: boolean
+  launched?: string
+  closed?: string
+  replacedBy?: string
+  website?: string
+  logo: string
+
+  constructor(data: ChannelData) {
+    this.id = data.id
+    this.name = data.name
+    this.altNames = new Collection(data.alt_names)
+    this.network = data.network || undefined
+    this.owners = new Collection(data.owners)
+    this.countryCode = data.country
+    this.subdivisionCode = data.subdivision || undefined
+    this.cityName = data.city || undefined
+    this.categoryIds = new Collection(data.categories)
+    this.isNSFW = data.is_nsfw
+    this.launched = data.launched || undefined
+    this.closed = data.closed || undefined
+    this.replacedBy = data.replaced_by || undefined
+    this.website = data.website || undefined
+    this.logo = data.logo
+  }
+}
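The new Channel model replaces core/ApiChannel: it wraps raw channels.json entries, renames the snake_case fields to camelCase (countryCode, replacedBy, ...), and coerces empty strings to undefined. A minimal sketch of how the scripts above construct it:

import { Storage, Collection } from '@freearhey/core'
import { Channel } from '../models' // '../../models' from a command script
import { DATA_DIR } from '../constants'

const dataStorage = new Storage(DATA_DIR)
const channelsData = await dataStorage.json('channels.json')
const channels = new Collection(channelsData).map(data => new Channel(data))

// Lookups elsewhere key the collection by id, e.g. for validate's wrong_xmltv_id check:
const channelsGroupedById = channels.groupBy((channel: Channel) => channel.id)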
scripts/models/feed.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import { Collection } from '@freearhey/core'
+
+type FeedData = {
+  channel: string
+  id: string
+  name: string
+  is_main: boolean
+  broadcast_area: Collection
+  languages: Collection
+  timezones: Collection
+  video_format: string
+}
+
+export class Feed {
+  channelId: string
+  id: string
+  name: string
+  isMain: boolean
+  broadcastAreaCodes: Collection
+  languageCodes: Collection
+  timezoneIds: Collection
+  videoFormat: string
+
+  constructor(data: FeedData) {
+    this.channelId = data.channel
+    this.id = data.id
+    this.name = data.name
+    this.isMain = data.is_main
+    this.broadcastAreaCodes = new Collection(data.broadcast_area)
+    this.languageCodes = new Collection(data.languages)
+    this.timezoneIds = new Collection(data.timezones)
+    this.videoFormat = data.video_format
+  }
+}
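Feed wraps feeds.json entries the same way and exposes channelId/isMain, which the edit command uses to offer per-channel feed choices. A minimal sketch of that grouping (the channel id is hypothetical):

import { Storage, Collection } from '@freearhey/core'
import { Feed } from '../models' // '../../models' from a command script
import { DATA_DIR } from '../constants'

const dataStorage = new Storage(DATA_DIR)
const feedsData = await dataStorage.json('feeds.json')
const feeds = new Collection(feedsData).map(data => new Feed(data))

// Group feeds under their parent channel before prompting for a feed ID.
const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)
const channelFeeds = feedsGroupedByChannelId.get('Channel.us') || []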
@@ -1,2 +1,4 @@
 export * from './issue'
 export * from './site'
+export * from './channel'
+export * from './feed'
@@ -1,6 +1,6 @@
 # <DOMAIN>

-https://example.com
+https://<DOMAIN>

 ### Download the guide
