Fixes linter issues

This commit is contained in:
freearhey 2025-01-01 10:18:30 +03:00
parent 60f3604ea5
commit 5b280dfbc8
17 changed files with 2895 additions and 2894 deletions

View file

@@ -1,51 +1,51 @@
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import path from 'path'
import { SITES_DIR, API_DIR } from '../../constants'
import { Channel } from 'epg-grabber'
// Shape of a single entry written to guides.json by main() below.
type OutputItem = {
  channel: string | null // channel.xmltv_id, or null when the channel is unmatched
  site: string // provider domain the guide was scraped from ('' when absent)
  site_id: string // provider-internal channel id ('' when absent)
  site_name: string // display name as parsed from the *.channels.xml file
  lang: string // guide language code ('' when absent)
}
/**
 * Parses every `*.channels.xml` file under SITES_DIR and saves the
 * flattened channel list to `<API_DIR>/guides.json`.
 */
async function main() {
  const logger = new Logger()
  logger.start('starting...') // fixed typo: message previously read 'staring...'
  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({ storage: sitesStorage })

  // prefer-const: the file list is assigned exactly once
  const files: string[] = await sitesStorage.list('**/*.channels.xml')

  let parsedChannels = new Collection()
  for (const filepath of files) {
    parsedChannels = parsedChannels.concat(await parser.parse(filepath))
  }
  logger.info(` found ${parsedChannels.count()} channel(s)`)

  // Flatten each parsed channel into the guides.json shape; a missing
  // xmltv_id becomes null, other missing fields become ''.
  const output = parsedChannels.map((channel: Channel): OutputItem => {
    return {
      channel: channel.xmltv_id || null,
      site: channel.site || '',
      site_id: channel.site_id || '',
      site_name: channel.name,
      lang: channel.lang || ''
    }
  })

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  // use outputFilename so the saved path and the logged path cannot diverge
  await apiStorage.save(outputFilename, output.toJSON())
  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import path from 'path'
import { SITES_DIR, API_DIR } from '../../constants'
import { Channel } from 'epg-grabber'
// Shape of a single entry written to guides.json by main() below.
type OutputItem = {
  channel: string | null // channel.xmltv_id, or null when the channel is unmatched
  site: string // provider domain the guide was scraped from ('' when absent)
  site_id: string // provider-internal channel id ('' when absent)
  site_name: string // display name as parsed from the *.channels.xml file
  lang: string // guide language code ('' when absent)
}
/**
 * Parses every `*.channels.xml` file under SITES_DIR and saves the
 * flattened channel list to `<API_DIR>/guides.json`.
 */
async function main() {
  const logger = new Logger()
  logger.start('starting...') // fixed typo: message previously read 'staring...'
  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({ storage: sitesStorage })

  // prefer-const: the file list is assigned exactly once
  const files: string[] = await sitesStorage.list('**/*.channels.xml')

  let parsedChannels = new Collection()
  for (const filepath of files) {
    parsedChannels = parsedChannels.concat(await parser.parse(filepath))
  }
  logger.info(` found ${parsedChannels.count()} channel(s)`)

  // Flatten each parsed channel into the guides.json shape; a missing
  // xmltv_id becomes null, other missing fields become ''.
  const output = parsedChannels.map((channel: Channel): OutputItem => {
    return {
      channel: channel.xmltv_id || null,
      site: channel.site || '',
      site_id: channel.site_id || '',
      site_name: channel.name,
      lang: channel.lang || ''
    }
  })

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  // use outputFilename so the saved path and the logged path cannot diverge
  await apiStorage.save(outputFilename, output.toJSON())
  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()

View file

@@ -43,7 +43,7 @@ async function main() {
const channelsIndex = sj.createIndex(channelsContent)
const buffer = new Dictionary()
for (let option of options.all()) {
for (const option of options.all()) {
const channel: Channel = option.channel
if (channel.xmltv_id) {
if (channel.xmltv_id !== '-') {
@@ -150,7 +150,7 @@ function getOptions(channelsIndex, channel: Channel) {
const query = channel.name
.replace(/\s(SD|TV|HD|SD\/HD|HDTV)$/i, '')
.replace(/(\(|\)|,)/gi, '')
.replace(/\-/gi, ' ')
.replace(/-/gi, ' ')
.replace(/\+/gi, '')
const similar = channelsIndex.search(query).map(item => new ApiChannel(item))

View file

@@ -26,7 +26,7 @@ async function main() {
const logger = new Logger()
const file = new File(options.config)
const dir = file.dirname()
const config = require(path.resolve(options.config))
const config = (await import(path.resolve(options.config))).default
const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
let channels = new Collection()

View file

@@ -47,7 +47,6 @@ async function main() {
const parsedChannels = await parser.parse(filepath)
const bufferById = new Dictionary()
const bufferBySiteId = new Dictionary()
const errors: ValidationError[] = []
parsedChannels.forEach((channel: Channel) => {

View file

@@ -1,58 +1,58 @@
import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
import { IssueLoader, HTMLTable, Markdown } from '../../core'
import { Issue, Site } from '../../models'
import { SITES_DIR, DOT_SITES_DIR } from '../../constants'
import path from 'path'
/**
 * Rebuilds the sites status table: loads the site folders under SITES_DIR,
 * attaches matching issues to each site, writes the HTML table to
 * `<DOT_SITES_DIR>/_table.md`, then recompiles sites.md from config.json.
 */
async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()
  const storage = new Storage(SITES_DIR)
  const sites = new Collection()

  logger.info('loading list of sites')
  const folders = await storage.list('*/')

  logger.info('loading issues...')
  const issues = await loadIssues(loader)

  logger.info('putting the data together...')
  folders.forEach((domain: string) => {
    // issues whose 'site' field matches this folder's domain
    const filteredIssues = issues.filter((issue: Issue) => domain === issue.data.get('site'))
    const site = new Site({
      domain,
      issues: filteredIssues
    })
    sites.add(site)
  })

  logger.info('creating sites table...')
  // prefer-const: `data` is only mutated via .add(), never reassigned
  const data = new Collection()
  sites.forEach((site: Site) => {
    data.add([
      `<a href="sites/${site.domain}">${site.domain}</a>`,
      site.getStatus().emoji,
      site.getIssues().all().join(', ')
    ])
  })
  const table = new HTMLTable(data.all(), [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes' }])

  const readmeStorage = new Storage(DOT_SITES_DIR)
  await readmeStorage.save('_table.md', table.toString())

  logger.info('updating sites.md...')
  const configPath = path.join(DOT_SITES_DIR, 'config.json')
  const sitesMarkdown = new Markdown(configPath)
  sitesMarkdown.compile()
}

main()
/**
 * Loads every issue carrying the 'broken guide' label together with one of
 * the two status labels, returning warnings first, then downs.
 */
async function loadIssues(loader: IssueLoader) {
  const warnings = await loader.load({ labels: ['broken guide', 'status:warning'] })
  const downs = await loader.load({ labels: ['broken guide', 'status:down'] })

  return warnings.concat(downs)
}
import { Logger, Storage, Collection } from '@freearhey/core'
import { IssueLoader, HTMLTable, Markdown } from '../../core'
import { Issue, Site } from '../../models'
import { SITES_DIR, DOT_SITES_DIR } from '../../constants'
import path from 'path'
/**
 * Rebuilds the sites status table: loads the site folders under SITES_DIR,
 * attaches matching issues to each site, writes the HTML table to
 * `<DOT_SITES_DIR>/_table.md`, then recompiles sites.md from config.json.
 */
async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()
  const storage = new Storage(SITES_DIR)
  const sites = new Collection()

  logger.info('loading list of sites')
  const folders = await storage.list('*/')

  logger.info('loading issues...')
  const issues = await loadIssues(loader)

  logger.info('putting the data together...')
  folders.forEach((domain: string) => {
    // keep only the issues whose 'site' field names this folder's domain
    const siteIssues = issues.filter((issue: Issue) => issue.data.get('site') === domain)

    sites.add(new Site({ domain, issues: siteIssues }))
  })

  logger.info('creating sites table...')
  const data = new Collection()
  sites.forEach((site: Site) => {
    const link = `<a href="sites/${site.domain}">${site.domain}</a>`
    const status = site.getStatus().emoji
    const notes = site.getIssues().all().join(', ')

    data.add([link, status, notes])
  })

  const columns = [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes' }]
  const table = new HTMLTable(data.all(), columns)

  const readmeStorage = new Storage(DOT_SITES_DIR)
  await readmeStorage.save('_table.md', table.toString())

  logger.info('updating sites.md...')
  const sitesMarkdown = new Markdown(path.join(DOT_SITES_DIR, 'config.json'))
  sitesMarkdown.compile()
}

main()
/**
 * Loads every issue carrying the 'broken guide' label together with one of
 * the two status labels, returning warnings first, then downs.
 */
async function loadIssues(loader: IssueLoader) {
  const warnings = await loader.load({ labels: ['broken guide', 'status:warning'] })
  const downs = await loader.load({ labels: ['broken guide', 'status:down'] })

  return warnings.concat(downs)
}