Mirror of https://github.com/iptv-org/epg.git (synced 2025-05-09 08:30:06 -04:00)
Fixes linter issues
This commit is contained in: parent 60f3604ea5, commit 5b280dfbc8
17 changed files with 2895 additions and 2894 deletions
@@ -1,51 +1,51 @@
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import path from 'path'
import { SITES_DIR, API_DIR } from '../../constants'
import { Channel } from 'epg-grabber'

type OutputItem = {
  channel: string | null
  site: string
  site_id: string
  site_name: string
  lang: string
}

async function main() {
  const logger = new Logger()

  logger.start('staring...')

  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({ storage: sitesStorage })

  let files: string[] = []
  files = await sitesStorage.list('**/*.channels.xml')

  let parsedChannels = new Collection()
  for (const filepath of files) {
    parsedChannels = parsedChannels.concat(await parser.parse(filepath))
  }

  logger.info(` found ${parsedChannels.count()} channel(s)`)

  const output = parsedChannels.map((channel: Channel): OutputItem => {
    return {
      channel: channel.xmltv_id || null,
      site: channel.site || '',
      site_id: channel.site_id || '',
      site_name: channel.name,
      lang: channel.lang || ''
    }
  })

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  await apiStorage.save('guides.json', output.toJSON())

  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()
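For reference, each record written to guides.json follows the OutputItem shape defined above; a hypothetical entry might look like this (all field values are illustrative, not taken from the repository):

// Hypothetical OutputItem record as it would appear in guides.json
const example: OutputItem = {
  channel: 'ExampleTV.us',   // xmltv_id, or null when the channel is not matched
  site: 'example.com',       // guide site the listing comes from
  site_id: '1234',           // channel id used by that site
  site_name: 'Example TV',   // display name taken from the *.channels.xml file
  lang: 'en'                 // language code, empty string when unknown
}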
@@ -43,7 +43,7 @@ async function main() {
  const channelsIndex = sj.createIndex(channelsContent)

  const buffer = new Dictionary()
-  for (let option of options.all()) {
+  for (const option of options.all()) {
    const channel: Channel = option.channel
-    if (channel.xmltv_id) {
+    if (channel.xmltv_id !== '-') {
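The loop change above reads like a fix for ESLint's prefer-const rule (an assumption on my part, since the linter configuration is not part of this diff): a binding that is never reassigned should be declared with const. A minimal sketch:

// prefer-const in a nutshell: `option` is never reassigned inside the loop body,
// so `const` is the preferred declaration and `let` gets flagged by the linter.
const options = ['a', 'b', 'c']

for (let option of options) {   // flagged by prefer-const
  console.log(option)
}

for (const option of options) { // preferred form, identical behaviour
  console.log(option)
}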
@@ -150,7 +150,7 @@ function getOptions(channelsIndex, channel: Channel) {
  const query = channel.name
    .replace(/\s(SD|TV|HD|SD\/HD|HDTV)$/i, '')
    .replace(/(\(|\)|,)/gi, '')
-    .replace(/\-/gi, ' ')
+    .replace(/-/gi, ' ')
    .replace(/\+/gi, '')
  const similar = channelsIndex.search(query).map(item => new ApiChannel(item))
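The regex change is consistent with ESLint's no-useless-escape rule (again an assumption, the rule name is not stated in the commit): outside a character class a hyphen needs no backslash, so both patterns behave identically:

// Both expressions strip every hyphen; only the escaped form trips the linter.
const name = 'Some-Channel-HD'
console.log(name.replace(/\-/gi, ' ')) // "Some Channel HD" (redundant escape)
console.log(name.replace(/-/gi, ' '))  // "Some Channel HD" (same result)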
@@ -26,7 +26,7 @@ async function main() {
  const logger = new Logger()
  const file = new File(options.config)
  const dir = file.dirname()
-  const config = require(path.resolve(options.config))
+  const config = (await import(path.resolve(options.config))).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  let channels = new Collection()
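Replacing require() with a dynamic import() is the usual way to load a file at runtime from ES module code; import() resolves to a module namespace object, so the actual config sits on .default. A minimal sketch, with a hypothetical helper name and config path:

import path from 'path'

// Hypothetical helper: load a site config the same way the command above does.
async function loadConfig(configPath: string) {
  const resolved = path.resolve(configPath)
  const config = (await import(resolved)).default // the config is the module's default export
  return config
}

// loadConfig('sites/example.com/example.com.config.js').then(config => console.log(config.site))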
@@ -47,7 +47,6 @@ async function main() {

  const parsedChannels = await parser.parse(filepath)

  const bufferById = new Dictionary()
  const bufferBySiteId = new Dictionary()
  const errors: ValidationError[] = []
  parsedChannels.forEach((channel: Channel) => {
@@ -1,58 +1,58 @@
- import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
+ import { Logger, Storage, Collection } from '@freearhey/core'
import { IssueLoader, HTMLTable, Markdown } from '../../core'
import { Issue, Site } from '../../models'
import { SITES_DIR, DOT_SITES_DIR } from '../../constants'
import path from 'path'

async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()
  const storage = new Storage(SITES_DIR)
  const sites = new Collection()

  logger.info('loading list of sites')
  const folders = await storage.list('*/')

  logger.info('loading issues...')
  const issues = await loadIssues(loader)

  logger.info('putting the data together...')
  folders.forEach((domain: string) => {
    const filteredIssues = issues.filter((issue: Issue) => domain === issue.data.get('site'))
    const site = new Site({
      domain,
      issues: filteredIssues
    })

    sites.add(site)
  })

  logger.info('creating sites table...')
-   let data = new Collection()
+   const data = new Collection()
  sites.forEach((site: Site) => {
    data.add([
      `<a href="sites/${site.domain}">${site.domain}</a>`,
      site.getStatus().emoji,
      site.getIssues().all().join(', ')
    ])
  })

  const table = new HTMLTable(data.all(), [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes' }])

  const readmeStorage = new Storage(DOT_SITES_DIR)
  await readmeStorage.save('_table.md', table.toString())

  logger.info('updating sites.md...')
  const configPath = path.join(DOT_SITES_DIR, 'config.json')
  const sitesMarkdown = new Markdown(configPath)
  sitesMarkdown.compile()
}

main()

async function loadIssues(loader: IssueLoader) {
  const issuesWithStatusWarning = await loader.load({ labels: ['broken guide', 'status:warning'] })
  const issuesWithStatusDown = await loader.load({ labels: ['broken guide', 'status:down'] })

  return issuesWithStatusWarning.concat(issuesWithStatusDown)
}
@@ -1,5 +1,6 @@
- const dayjs = require('dayjs')
- const utc = require('dayjs/plugin/utc')
+ import dayjs from 'dayjs'
+ import utc from 'dayjs/plugin/utc'

dayjs.extend(utc)

const date = {}

@@ -10,4 +11,4 @@ date.getUTC = function (d = null) {
  return dayjs.utc().startOf('d')
}

- module.exports = date
+ export default date
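After this CommonJS-to-ESM conversion the helper is consumed with import syntax instead of require(); a minimal sketch of a caller, assuming the module lives at a relative path like './date' (the path is illustrative):

// Hypothetical caller of the converted date helper.
import date from './date'

// getUTC() with no argument returns the start of the current UTC day as a dayjs object.
const today = date.getUTC()
console.log(today.toISOString())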
@@ -1,46 +1,46 @@
type Column = {
  name: string
  nowrap?: boolean
  align?: string
}

type DataItem = string[]

export class HTMLTable {
  data: DataItem[]
  columns: Column[]

  constructor(data: DataItem[], columns: Column[]) {
    this.data = data
    this.columns = columns
  }

  toString() {
    let output = '<table>\n'

    output += ' <thead>\n <tr>'
    for (const column of this.columns) {
      output += `<th align="left">${column.name}</th>`
    }
    output += '</tr>\n </thead>\n'

    output += ' <tbody>\n'
    for (const item of this.data) {
      output += ' <tr>'
      let i = 0
      for (const prop in item) {
        const column = this.columns[i]
        const nowrap = column.nowrap ? ' nowrap' : ''
        const align = column.align ? ` align="${column.align}"` : ''
        output += `<td${align}${nowrap}>${item[prop]}</td>`
        i++
      }
      output += '</tr>\n'
    }
    output += ' </tbody>\n'

    output += '</table>'

    return output
  }
}
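A quick usage sketch of HTMLTable as defined above, with hypothetical row data shaped like what the sites script feeds it:

// Hypothetical rows matching the Site / Status / Notes columns used in the sites script.
const table = new HTMLTable(
  [
    ['<a href="sites/example.com">example.com</a>', '🟢', ''],
    ['<a href="sites/other.example">other.example</a>', '🔴', 'open issue link(s) would go here']
  ],
  [{ name: 'Site' }, { name: 'Status' }, { name: 'Notes' }]
)

// toString() renders a plain <table> element, which the sites script saves to _table.md.
console.log(table.toString())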
@@ -1,40 +1,41 @@
import { Collection } from '@freearhey/core'
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { Octokit } from '@octokit/core'
import { IssueParser } from './'
import { TESTING, OWNER, REPO } from '../constants'

const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()

export class IssueLoader {
  async load({ labels }: { labels: string[] | string }) {
    labels = Array.isArray(labels) ? labels.join(',') : labels
    let issues: object[] = []
    if (TESTING) {
      switch (labels) {
        case 'broken guide,status:warning':
-           issues = require('../../tests/__data__/input/issues/broken_guide_warning.js')
+           issues = (await import('../../tests/__data__/input/issues/broken_guide_warning.js'))
+             .default
          break
        case 'broken guide,status:down':
-           issues = require('../../tests/__data__/input/issues/broken_guide_down.js')
+           issues = (await import('../../tests/__data__/input/issues/broken_guide_down.js')).default
          break
      }
    } else {
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()

    return new Collection(issues).map(parser.parse)
  }
}
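Callers of IssueLoader.load() can pass the labels either as an array or as a pre-joined string; the array form is joined with commas internally, so the two calls below (run inside any async context) query the same label combination:

const loader = new IssueLoader()
const byArray = await loader.load({ labels: ['broken guide', 'status:down'] })
const byString = await loader.load({ labels: 'broken guide,status:down' })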
@@ -1,34 +1,34 @@
import { Dictionary } from '@freearhey/core'
import { Issue } from '../models'

const FIELDS = new Dictionary({
  Site: 'site'
})

export class IssueParser {
  parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue {
    const fields = issue.body.split('###')

    const data = new Dictionary()
    fields.forEach((field: string) => {
-       let parsed = field.split(/\r?\n/).filter(Boolean)
+       const parsed = field.split(/\r?\n/).filter(Boolean)
      let _label = parsed.shift()
      _label = _label ? _label.trim() : ''
      let _value = parsed.join('\r\n')
      _value = _value ? _value.trim() : ''

      if (!_label || !_value) return data

      const id: string = FIELDS.get(_label)
      const value: string = _value === '_No response_' || _value === 'None' ? '' : _value

      if (!id) return

      data.set(id, value)
    })

    const labels = issue.labels.map(label => label.name)

    return new Issue({ number: issue.number, labels, data })
  }
}
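The parser expects GitHub issue-form bodies in which every field starts with a ### heading; a hypothetical input and the value it yields (the issue number and domain are made up):

// Hypothetical issue body produced by a GitHub issue form.
const issue = {
  number: 123,
  labels: [{ name: 'broken guide' }, { name: 'status:down' }],
  body: '### Site\r\n\r\nexample.com\r\n'
}

const parsed = new IssueParser().parse(issue)
// parsed.data.get('site') === 'example.com'
// Headings not listed in FIELDS, '_No response_' and 'None' values are all skipped.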
@@ -1,13 +1,13 @@
import markdownInclude from 'markdown-include'

export class Markdown {
  filepath: string

  constructor(filepath: string) {
    this.filepath = filepath
  }

  compile() {
    markdownInclude.compileFiles(this.filepath)
  }
}
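markdownInclude.compileFiles() takes the path of a markdown-include configuration file. The repository's actual config.json is not part of this diff, but markdown-include's documented format looks roughly like the sketch below (file names are illustrative only):

// Hypothetical markdown-include config: the listed files are concatenated into the build target.
const markdownIncludeConfig = {
  build: 'sites.md',
  files: ['.sites/_table.md']
}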
@@ -1,7 +1,7 @@
import { Storage, Collection, DateTime, Logger } from '@freearhey/core'
import { ChannelsParser, ConfigLoader, ApiChannel, Queue } from './'
import { SITES_DIR, DATA_DIR } from '../constants'
- import { Channel, SiteConfig } from 'epg-grabber'
+ import { SiteConfig } from 'epg-grabber'
import path from 'path'
import { GrabOptions } from '../commands/epg/grab'
@@ -1,2 +1,2 @@
export * from './issue'
export * from './site'
@@ -1,24 +1,24 @@
import { Dictionary } from '@freearhey/core'
import { OWNER, REPO } from '../constants'

type IssueProps = {
  number: number
  labels: string[]
  data: Dictionary
}

export class Issue {
  number: number
  labels: string[]
  data: Dictionary

  constructor({ number, labels, data }: IssueProps) {
    this.number = number
    this.labels = labels
    this.data = data
  }

  getURL() {
    return `https://github.com/${OWNER}/${REPO}/issues/${this.number}`
  }
}
@@ -1,57 +1,57 @@
import { Collection } from '@freearhey/core'
import { Issue } from './'

enum StatusCode {
  DOWN = 'down',
  WARNING = 'warning',
  OK = 'ok'
}

type Status = {
  code: StatusCode
  emoji: string
}

type SiteProps = {
  domain: string
  issues: Collection
}

export class Site {
  domain: string
  issues: Collection

  constructor({ domain, issues }: SiteProps) {
    this.domain = domain
    this.issues = issues
  }

  getStatus(): Status {
    const issuesWithStatusDown = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:down')
    )
    if (issuesWithStatusDown.notEmpty())
      return {
        code: StatusCode.DOWN,
        emoji: '🔴'
      }

    const issuesWithStatusWarning = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:warning')
    )
    if (issuesWithStatusWarning.notEmpty())
      return {
        code: StatusCode.WARNING,
        emoji: '🟡'
      }

    return {
      code: StatusCode.OK,
      emoji: '🟢'
    }
  }

  getIssues(): Collection {
    return this.issues.map((issue: Issue) => issue.getURL())
  }
}
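A short sketch of how the Issue and Site models above fit together; the issue number and domain are hypothetical:

import { Collection, Dictionary } from '@freearhey/core'

// One open issue labelled status:down for the site example.com.
const issue = new Issue({
  number: 123,
  labels: ['broken guide', 'status:down'],
  data: new Dictionary({ site: 'example.com' })
})

const site = new Site({ domain: 'example.com', issues: new Collection([issue]) })

console.log(site.getStatus().emoji) // 🔴 — a status:down label takes priority over status:warning
console.log(site.getIssues().all()) // e.g. [ 'https://github.com/iptv-org/epg/issues/123' ] given OWNER/REPO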