Fixes linter issues

freearhey 2025-01-01 10:18:30 +03:00
parent 60f3604ea5
commit 5b280dfbc8
17 changed files with 2895 additions and 2894 deletions

View file

@@ -1,51 +1,51 @@
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import path from 'path'
import { SITES_DIR, API_DIR } from '../../constants'
import { Channel } from 'epg-grabber'

type OutputItem = {
  channel: string | null
  site: string
  site_id: string
  site_name: string
  lang: string
}

async function main() {
  const logger = new Logger()

  logger.start('staring...')

  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({ storage: sitesStorage })

  let files: string[] = []
  files = await sitesStorage.list('**/*.channels.xml')

  let parsedChannels = new Collection()
  for (const filepath of files) {
    parsedChannels = parsedChannels.concat(await parser.parse(filepath))
  }

  logger.info(` found ${parsedChannels.count()} channel(s)`)

  const output = parsedChannels.map((channel: Channel): OutputItem => {
    return {
      channel: channel.xmltv_id || null,
      site: channel.site || '',
      site_id: channel.site_id || '',
      site_name: channel.name,
      lang: channel.lang || ''
    }
  })

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  await apiStorage.save('guides.json', output.toJSON())

  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()
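
For context, each record that ends up in guides.json follows the OutputItem shape defined above. A minimal sketch of one entry, with purely illustrative values:

// Illustrative only: one guides.json record matching the OutputItem type above.
// All field values here are hypothetical, not taken from the actual data.
const example: OutputItem = {
  channel: 'ExampleTV.us', // xmltv_id, or null when the channel is unmatched
  site: 'example.com',     // guide site the listing comes from
  site_id: '123',          // channel id used by that site
  site_name: 'Example TV', // display name from the *.channels.xml file
  lang: 'en'               // language code, empty string when unknown
}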

View file

@@ -43,7 +43,7 @@ async function main() {
  const channelsIndex = sj.createIndex(channelsContent)

  const buffer = new Dictionary()
-  for (let option of options.all()) {
+  for (const option of options.all()) {
    const channel: Channel = option.channel
    if (channel.xmltv_id) {
      if (channel.xmltv_id !== '-') {
@@ -150,7 +150,7 @@ function getOptions(channelsIndex, channel: Channel) {
  const query = channel.name
    .replace(/\s(SD|TV|HD|SD\/HD|HDTV)$/i, '')
    .replace(/(\(|\)|,)/gi, '')
-    .replace(/\-/gi, ' ')
+    .replace(/-/gi, ' ')
    .replace(/\+/gi, '')

  const similar = channelsIndex.search(query).map(item => new ApiChannel(item))

View file

@@ -26,7 +26,7 @@ async function main() {
  const logger = new Logger()
  const file = new File(options.config)
  const dir = file.dirname()
-  const config = require(path.resolve(options.config))
+  const config = (await import(path.resolve(options.config))).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  let channels = new Collection()
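
The change above replaces a CommonJS require of the site config with a dynamic import(). A minimal sketch of the same pattern in isolation, assuming a Node setup where import() accepts a resolved file path and the config module has a default export:

import path from 'path'

// Hypothetical helper mirroring the change above; loadConfig and its argument
// name are illustrative, not part of the repository.
async function loadConfig(configPath: string) {
  // import() resolves to the module namespace object, so the default export
  // must be unwrapped explicitly, which is why `.default` appears above.
  const mod = await import(path.resolve(configPath))
  return mod.default
}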

View file

@@ -47,7 +47,6 @@ async function main() {
  const parsedChannels = await parser.parse(filepath)

-  const bufferById = new Dictionary()
  const bufferBySiteId = new Dictionary()
  const errors: ValidationError[] = []

  parsedChannels.forEach((channel: Channel) => {

View file

@@ -1,58 +1,58 @@
-import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
+import { Logger, Storage, Collection } from '@freearhey/core'
import { IssueLoader, HTMLTable, Markdown } from '../../core'
import { Issue, Site } from '../../models'
import { SITES_DIR, DOT_SITES_DIR } from '../../constants'
import path from 'path'

async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()
  const storage = new Storage(SITES_DIR)
  const sites = new Collection()

  logger.info('loading list of sites')
  const folders = await storage.list('*/')

  logger.info('loading issues...')
  const issues = await loadIssues(loader)

  logger.info('putting the data together...')
  folders.forEach((domain: string) => {
    const filteredIssues = issues.filter((issue: Issue) => domain === issue.data.get('site'))

    const site = new Site({
      domain,
      issues: filteredIssues
    })

    sites.add(site)
  })

  logger.info('creating sites table...')
-  let data = new Collection()
+  const data = new Collection()
  sites.forEach((site: Site) => {
    data.add([
      `<a href="sites/${site.domain}">${site.domain}</a>`,
      site.getStatus().emoji,
      site.getIssues().all().join(', ')
    ])
  })

  const table = new HTMLTable(data.all(), [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes' }])

  const readmeStorage = new Storage(DOT_SITES_DIR)
  await readmeStorage.save('_table.md', table.toString())

  logger.info('updating sites.md...')
  const configPath = path.join(DOT_SITES_DIR, 'config.json')
  const sitesMarkdown = new Markdown(configPath)
  sitesMarkdown.compile()
}

main()

async function loadIssues(loader: IssueLoader) {
  const issuesWithStatusWarning = await loader.load({ labels: ['broken guide', 'status:warning'] })
  const issuesWithStatusDown = await loader.load({ labels: ['broken guide', 'status:down'] })

  return issuesWithStatusWarning.concat(issuesWithStatusDown)
}

View file

@@ -1,5 +1,6 @@
-const dayjs = require('dayjs')
-const utc = require('dayjs/plugin/utc')
+import dayjs from 'dayjs'
+import utc from 'dayjs/plugin/utc'

dayjs.extend(utc)

const date = {}
@@ -10,4 +11,4 @@ date.getUTC = function (d = null) {
  return dayjs.utc().startOf('d')
}

-module.exports = date
+export default date
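
This converts the date helper from CommonJS to an ES module. A short sketch of how a consumer changes under that assumption (the relative import path is illustrative):

// Before: const date = require('./date')
// After: a default import, matching the `export default date` shown above.
import date from './date'

// getUTC() (shown in the hunk above) returns the start of the current UTC day
// as a dayjs object when called without arguments.
const today = date.getUTC()
console.log(today.toISOString())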

View file

@@ -1,46 +1,46 @@
type Column = {
  name: string
  nowrap?: boolean
  align?: string
}

type DataItem = string[]

export class HTMLTable {
  data: DataItem[]
  columns: Column[]

  constructor(data: DataItem[], columns: Column[]) {
    this.data = data
    this.columns = columns
  }

  toString() {
    let output = '<table>\n'

    output += ' <thead>\n <tr>'
    for (const column of this.columns) {
      output += `<th align="left">${column.name}</th>`
    }
    output += '</tr>\n </thead>\n'

    output += ' <tbody>\n'
    for (const item of this.data) {
      output += ' <tr>'
      let i = 0
      for (const prop in item) {
        const column = this.columns[i]
        const nowrap = column.nowrap ? ' nowrap' : ''
        const align = column.align ? ` align="${column.align}"` : ''
        output += `<td${align}${nowrap}>${item[prop]}</td>`
        i++
      }
      output += '</tr>\n'
    }
    output += ' </tbody>\n'
    output += '</table>'

    return output
  }
}
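
For reference, a small usage sketch of the HTMLTable class above; the import path, rows, and column options are made up for illustration:

import { HTMLTable } from '../core' // illustrative path

const table = new HTMLTable(
  [
    ['example.com', '🟢', ''],
    ['another.example', '🔴', 'https://github.com/owner/repo/issues/1'] // hypothetical row
  ],
  [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes', align: 'left' }]
)

// toString() renders a <thead> from the column names and one <tr> per row,
// honoring the optional nowrap/align flags per column.
console.log(table.toString())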

View file

@@ -1,40 +1,41 @@
import { Collection } from '@freearhey/core'
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { Octokit } from '@octokit/core'
import { IssueParser } from './'
import { TESTING, OWNER, REPO } from '../constants'

const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()

export class IssueLoader {
  async load({ labels }: { labels: string[] | string }) {
    labels = Array.isArray(labels) ? labels.join(',') : labels
    let issues: object[] = []
    if (TESTING) {
      switch (labels) {
        case 'broken guide,status:warning':
-          issues = require('../../tests/__data__/input/issues/broken_guide_warning.js')
+          issues = (await import('../../tests/__data__/input/issues/broken_guide_warning.js'))
+            .default
          break
        case 'broken guide,status:down':
-          issues = require('../../tests/__data__/input/issues/broken_guide_down.js')
+          issues = (await import('../../tests/__data__/input/issues/broken_guide_down.js')).default
          break
      }
    } else {
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()

    return new Collection(issues).map(parser.parse)
  }
}

View file

@@ -1,34 +1,34 @@
import { Dictionary } from '@freearhey/core'
import { Issue } from '../models'

const FIELDS = new Dictionary({
  Site: 'site'
})

export class IssueParser {
  parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue {
    const fields = issue.body.split('###')

    const data = new Dictionary()
    fields.forEach((field: string) => {
-      let parsed = field.split(/\r?\n/).filter(Boolean)
+      const parsed = field.split(/\r?\n/).filter(Boolean)

      let _label = parsed.shift()
      _label = _label ? _label.trim() : ''
      let _value = parsed.join('\r\n')
      _value = _value ? _value.trim() : ''

      if (!_label || !_value) return data

      const id: string = FIELDS.get(_label)
      const value: string = _value === '_No response_' || _value === 'None' ? '' : _value

      if (!id) return

      data.set(id, value)
    })

    const labels = issue.labels.map(label => label.name)

    return new Issue({ number: issue.number, labels, data })
  }
}
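
A brief sketch of how the parser above behaves, using a hypothetical issue payload shaped like the argument parse() expects:

import { IssueParser } from '../core' // illustrative path

const parser = new IssueParser()

// Hypothetical GitHub issue body using the '### Site' heading mapped by FIELDS.
const issue = parser.parse({
  number: 123,
  body: '### Site\r\n\r\nexample.com',
  labels: [{ name: 'broken guide' }, { name: 'status:down' }]
})

console.log(issue.data.get('site')) // 'example.com'
console.log(issue.labels)           // ['broken guide', 'status:down']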

View file

@@ -1,13 +1,13 @@
import markdownInclude from 'markdown-include'

export class Markdown {
  filepath: string

  constructor(filepath: string) {
    this.filepath = filepath
  }

  compile() {
    markdownInclude.compileFiles(this.filepath)
  }
}

View file

@@ -1,7 +1,7 @@
import { Storage, Collection, DateTime, Logger } from '@freearhey/core'
import { ChannelsParser, ConfigLoader, ApiChannel, Queue } from './'
import { SITES_DIR, DATA_DIR } from '../constants'
-import { Channel, SiteConfig } from 'epg-grabber'
+import { SiteConfig } from 'epg-grabber'
import path from 'path'
import { GrabOptions } from '../commands/epg/grab'

View file

@@ -1,2 +1,2 @@
export * from './issue'
export * from './site'

View file

@@ -1,24 +1,24 @@
import { Dictionary } from '@freearhey/core'
import { OWNER, REPO } from '../constants'

type IssueProps = {
  number: number
  labels: string[]
  data: Dictionary
}

export class Issue {
  number: number
  labels: string[]
  data: Dictionary

  constructor({ number, labels, data }: IssueProps) {
    this.number = number
    this.labels = labels
    this.data = data
  }

  getURL() {
    return `https://github.com/${OWNER}/${REPO}/issues/${this.number}`
  }
}

View file

@@ -1,57 +1,57 @@
import { Collection } from '@freearhey/core'
import { Issue } from './'

enum StatusCode {
  DOWN = 'down',
  WARNING = 'warning',
  OK = 'ok'
}

type Status = {
  code: StatusCode
  emoji: string
}

type SiteProps = {
  domain: string
  issues: Collection
}

export class Site {
  domain: string
  issues: Collection

  constructor({ domain, issues }: SiteProps) {
    this.domain = domain
    this.issues = issues
  }

  getStatus(): Status {
    const issuesWithStatusDown = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:down')
    )
    if (issuesWithStatusDown.notEmpty())
      return {
        code: StatusCode.DOWN,
        emoji: '🔴'
      }

    const issuesWithStatusWarning = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:warning')
    )
    if (issuesWithStatusWarning.notEmpty())
      return {
        code: StatusCode.WARNING,
        emoji: '🟡'
      }

    return {
      code: StatusCode.OK,
      emoji: '🟢'
    }
  }

  getIssues(): Collection {
    return this.issues.map((issue: Issue) => issue.getURL())
  }
}
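
To illustrate how getStatus() reacts to issue labels, a minimal sketch with made-up data (import paths and values are illustrative):

import { Collection, Dictionary } from '@freearhey/core'
import { Issue, Site } from '../models' // illustrative path

const issue = new Issue({
  number: 1,
  labels: ['broken guide', 'status:down'],
  data: new Dictionary({ site: 'example.com' })
})

const site = new Site({ domain: 'example.com', issues: new Collection([issue]) })

console.log(site.getStatus())       // { code: 'down', emoji: '🔴' }
console.log(site.getIssues().all()) // ['https://github.com/<OWNER>/<REPO>/issues/1']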

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -1,36 +1,36 @@
import { execSync } from 'child_process'
import fs from 'fs-extra'
import path from 'path'

beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
  fs.mkdirSync('tests/__data__/output/.sites')
  fs.copyFileSync(
    'tests/__data__/input/.sites/config.json',
    'tests/__data__/output/.sites/config.json'
  )
  fs.copyFileSync(
    'tests/__data__/input/.sites/template.md',
    'tests/__data__/output/.sites/template.md'
  )
})

it('can update SITES.md', () => {
-  const stdout = execSync('DOT_SITES_DIR=tests/__data__/output/.sites npm run sites:update', {
+  execSync('DOT_SITES_DIR=tests/__data__/output/.sites npm run sites:update', {
    encoding: 'utf8'
  })

  expect(content('tests/__data__/output/sites.md')).toEqual(
    content('tests/__data__/expected/_sites.md')
  )
  expect(true).toBe(true)
})

function content(filepath: string) {
  const data = fs.readFileSync(path.resolve(filepath), {
    encoding: 'utf8'
  })

  return JSON.stringify(data)
}