Delete unused scripts

Aleksandr Statciuk 2023-05-15 18:19:53 +03:00
parent e68df665c2
commit 9cfc51cac3
7 changed files with 0 additions and 576 deletions

@@ -1,31 +0,0 @@
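// Builds an index of the generated guides from the update log and saves it to <OUTPUT_DIR>/guides.json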
const { file, parser, logger } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const OUTPUT_DIR = process.env.OUTPUT_DIR || '.api'

async function main() {
  let guides = []

  const logPath = `${LOGS_DIR}/guides/update.log`
  const results = await parser.parseLogs(logPath)
  for (const result of results) {
    guides.push({
      channel: result.channel,
      site: result.site,
      lang: result.lang,
      days: result.days,
      url: `https://iptv-org.github.io/epg/guides/${result.filename}.xml`
    })
  }

  guides = _.sortBy(guides, 'channel')

  const outputFilepath = `${OUTPUT_DIR}/guides.json`
  await file.create(outputFilepath, JSON.stringify(guides))
  logger.info(`saved to "${outputFilepath}"...`)
}

main()

@@ -1,89 +0,0 @@
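// Grabs EPG data for every queue item in the given cluster and appends the results to the cluster's log file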
const _ = require('lodash')
const { EPGGrabber, Channel } = require('epg-grabber')
const { program } = require('commander')
const { db, logger, timer, file, parser } = require('../../core')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')

dayjs.extend(utc)

const options = program
  .requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
  .option('--delay <delay>', 'Delay between requests (in milliseconds)', parser.parseNumber)
  .option(
    '-t, --timeout <timeout>',
    'Set a timeout for each request (in milliseconds)',
    parser.parseNumber
  )
  .option('--debug', 'Enable debug mode', false)
  .parse(process.argv)
  .opts()

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const CLUSTER_PATH = `${LOGS_DIR}/cluster/load/cluster_${options.clusterId}.log`

async function main() {
  logger.info('Starting...')
  timer.start()

  logger.info(`Loading cluster: ${options.clusterId}`)
  logger.info(`Creating '${CLUSTER_PATH}'...`)
  await file.create(CLUSTER_PATH)

  await db.queue.load()
  let items = await db.queue.find({ cluster_id: options.clusterId })
  items = _.orderBy(items, [i => i.channel.id.toLowerCase(), 'date'])
  const total = items.length

  logger.info('Loading...')
  let i = 1
  let totalPrograms = 0
  let config = require(file.resolve(items[0].configPath))
  config = _.merge(config, {
    debug: options.debug,
    delay: options.delay,
    request: {
      timeout: options.timeout
    }
  })
  const grabber = new EPGGrabber(config)
  for (const item of items) {
    const channel = new Channel(item.channel)
    await new Promise(resolve => {
      grabber.grab(channel, item.date, async (data, err) => {
        logger.info(
          `[${i}/${total}] ${channel.site} (${channel.lang}) - ${channel.id} - ${dayjs
            .utc(data.date)
            .format('MMM D, YYYY')} (${data.programs.length} programs)`
        )

        if (err) logger.error(err.message)

        const result = {
          _qid: item._id,
          programs: data.programs,
          error: err ? err.message : null
        }
        await file.append(CLUSTER_PATH, JSON.stringify(result) + '\n')

        totalPrograms += data.programs.length

        if (i < total) i++

        resolve()
      })
    })
  }

  db.queue.compact()

  logger.info(`Done in ${timer.format('HH[h] mm[m] ss[s]')}`)

  if (totalPrograms === 0) {
    logger.error('\nError: No programs found')
    process.exit(1)
  }
}

main()

@@ -1,115 +0,0 @@
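// Downloads the latest 'database' artifacts (programs.db, queue.db) from each workflow's most recent successful run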
const { Octokit } = require('@octokit/core')
const dayjs = require('dayjs')
const isToday = require('dayjs/plugin/isToday')
const utc = require('dayjs/plugin/utc')
const unzipit = require('unzipit')
const { file, logger } = require('../../core')

dayjs.extend(isToday)
dayjs.extend(utc)

const DB_DIR = process.env.DB_DIR || './scripts/database'
const programsPath = `${DB_DIR}/programs.db`
const queuePath = `${DB_DIR}/queue.db`

const octokit = new Octokit({
  auth: process.env.GITHUB_TOKEN
})

async function main() {
  try {
    let workflows = await getWorkflows()
    logger.info(`found ${workflows.length} workflows\r\n`)

    await file.create(programsPath)
    await file.create(queuePath)
    const total = workflows.length
    for (let [i, workflow] of workflows.entries()) {
      logger.info(`[${i + 1}/${total}] ${workflow.name}`)
      const run = await getWorkflowRun(workflow)
      if (!run) continue
      let artifact = await getRunArtifacts(run)
      const programsBuffer = await downloadArtifact(artifact, 'programs.db')
      await file.append(programsPath, programsBuffer)
      const queueBuffer = await downloadArtifact(artifact, 'queue.db')
      await file.append(queuePath, queueBuffer)
    }
  } catch (err) {
    console.log(err.message)
  }
}

main()

async function downloadArtifact(artifact, filename) {
  let results = await octokit.request(
    'GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}',
    {
      owner: 'iptv-org',
      repo: 'epg',
      artifact_id: artifact.id,
      archive_format: 'zip'
    }
  )

  const { entries } = await unzipit.unzip(results.data)
  const arrayBuffer = await entries[filename].arrayBuffer()

  return toString(arrayBuffer)
}

async function getRunArtifacts(run) {
  let results = await octokit.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
    owner: 'iptv-org',
    repo: 'epg',
    run_id: run.id
  })

  return results.data.artifacts.find(a => a.name === 'database')
}

async function getWorkflowRun(workflow) {
  let today = dayjs.utc().subtract(1, 'd').format('YYYY-MM-DD')
  let results = await octokit.request(
    'GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs',
    {
      owner: 'iptv-org',
      repo: 'epg',
      workflow_id: workflow.id,
      status: 'success',
      created: `>=${today}`
    }
  )

  return results.data.workflow_runs.find(
    r => r.event === 'schedule' || r.event === 'workflow_dispatch'
  )
}

async function getWorkflows() {
  let workflows = []
  for (let page of [1, 2, 3]) {
    try {
      let results = await octokit.request('GET /repos/{owner}/{repo}/actions/workflows', {
        owner: 'iptv-org',
        repo: 'epg',
        per_page: 100,
        page
      })
      workflows = workflows.concat(results.data.workflows)
    } catch (err) {
      console.log(err.message)
    }
  }

  return workflows.filter(w => !/^_/.test(w.name) && w.name !== 'pages-build-deployment')
}

function toString(arrayBuffer) {
  return new TextDecoder().decode(arrayBuffer)
}

@@ -1,117 +0,0 @@
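// Generates XMLTV guide files (plain and gzipped) from the queue and programs databases and writes the guides update log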
const { db, api, logger, file, zip } = require('../../core')
const { generateXMLTV, Program, Channel } = require('epg-grabber')
const _ = require('lodash')

const PUBLIC_DIR = process.env.PUBLIC_DIR || '.gh-pages'
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const CURR_DATE = process.env.CURR_DATE || new Date()

const logPath = `${LOGS_DIR}/guides/update.log`

let api_channels = {}
let db_queue = []
let db_programs = []
let guides = []

async function main() {
  logger.info(`starting...`)

  logger.info('loading data/channels.json...')
  await api.channels.load()
  api.channels.all().forEach(channel => {
    api_channels[channel.id] = channel
  })

  logger.info('loading database/queue.db...')
  await db.queue.load()
  db_queue = await db.queue.find({})
  logger.info(`found ${db_queue.length} items`)

  logger.info('loading database/programs.db...')
  await db.programs.load()
  db_programs = await db.programs.find({})
  logger.info(`found ${db_programs.length} programs`)

  await generate()

  logger.info(`creating ${logPath}...`)
  await file.create(logPath, guides.map(g => JSON.stringify(g)).join('\r\n'))

  logger.info('finished')
}

main()

async function generate() {
  let queue = _.uniqBy(db_queue, i => i.channel.lang + i.channel.id + i.channel.site)
  queue = _.groupBy(queue, i => (i.channel ? `${i.channel.lang}/${i.channel.site}` : `_`))
  delete queue['_']

  let programs = _.groupBy(db_programs, p =>
    p.titles.length ? `${p.titles[0].lang}/${p.site}` : `_`
  )
  delete programs['_']

  for (let filename in queue) {
    if (!queue[filename]) continue
    const channels = queue[filename].map(i => {
      const channelData = api_channels[i.channel.id]
      channelData.site = i.channel.site
      channelData.site_id = i.channel.site_id
      channelData.lang = i.channel.lang

      return new Channel(channelData)
    })

    await save(filename, channels, programs[filename])

    for (let channel of channels) {
      const configPath = `sites/${channel.site}/${channel.site}.config.js`
      const config = require(file.resolve(configPath))

      guides.push({
        site: channel.site,
        lang: channel.lang,
        days: config.days,
        channel: channel.id,
        filename
      })
    }
  }
}

async function save(filepath, channels, programs = []) {
  let output = {
    channels,
    programs: [],
    date: CURR_DATE
  }

  for (let programData of programs) {
    let channel = channels.find(c => c.id === programData.channel)
    if (!channel) continue

    let program = new Program(programData, channel)
    output.programs.push(program)
  }

  output.channels = _.sortBy(output.channels, 'id')
  output.channels = _.uniqBy(output.channels, 'id')

  output.programs = _.sortBy(output.programs, ['channel', 'start'])
  output.programs = _.uniqBy(output.programs, p => p.channel + p.start)

  const xmlFilepath = `${PUBLIC_DIR}/guides/${filepath}.xml`
  const gzFilepath = `${PUBLIC_DIR}/guides/${filepath}.xml.gz`

  logger.info(`creating ${xmlFilepath}...`)
  const xmltv = generateXMLTV(output)
  await file.create(xmlFilepath, xmltv)

  logger.info(`creating ${gzFilepath}...`)
  const compressed = await zip.compress(xmltv)
  await file.create(gzFilepath, compressed)

  return output
}

@@ -1,35 +0,0 @@
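// Parses the per-cluster load logs and saves the grabbed programs back into the programs database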
const { db, logger, file, parser } = require('../../core')
const { Program, Channel } = require('epg-grabber')
const _ = require('lodash')

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'

async function main() {
  await db.queue.load()
  await db.programs.load()
  await db.programs.reset()
  const files = await file.list(`${LOGS_DIR}/cluster/load/cluster_*.log`)
  for (const filepath of files) {
    logger.info(`Parsing "${filepath}"...`)
    const results = await parser.parseLogs(filepath)
    for (const result of results) {
      const queue = await db.queue.find({ _id: result._qid }).limit(1)
      if (!queue.length) continue
      const item = queue[0]
      const c = new Channel(item.channel)
      const programs = result.programs.map(p => {
        p = new Program(p, c)
        p._qid = result._qid

        return p
      })
      await db.programs.insert(programs)
      await db.queue.update({ _id: result._qid }, { $set: { error: result.error } })
    }
  }
  await db.queue.compact()
}

main()

@@ -1,103 +0,0 @@
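// Builds the grabbing queue from the sites' *.channels.xml files, splits it into clusters, and saves it to the queue database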
const { db, file, parser, logger, date, api } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')

const options = program
  .option(
    '--max-clusters <max-clusters>',
    'Set maximum number of clusters',
    parser.parseNumber,
    256
  )
  .parse(process.argv)
  .opts()

const CHANNELS_PATH = process.env.CHANNELS_PATH || 'sites/**/*.channels.xml'
const CURR_DATE = process.env.CURR_DATE || new Date()

async function main() {
  logger.info('Starting...')
  logger.info(`Number of clusters: ${options.maxClusters}`)

  await saveToDatabase(await createQueue())

  logger.info('Done')
}

main()

async function createQueue() {
  logger.info(`Create queue...`)

  let queue = {}

  await api.channels.load().catch(console.error)
  const files = await file.list(CHANNELS_PATH).catch(console.error)
  const utcDate = date.getUTC(CURR_DATE)
  for (const filepath of files) {
    try {
      const dir = file.dirname(filepath)
      const { site, channels } = await parser.parseChannels(filepath)
      if (!site) continue
      const configPath = `${dir}/${site}.config.js`
      const config = require(file.resolve(configPath))
      if (config.skip) continue
      const filename = file.basename(filepath)
      const days = config.days || 1
      const dates = Array.from({ length: days }, (_, i) => utcDate.add(i, 'd'))
      for (const channel of channels) {
        if (!channel.site || !channel.id) continue
        const found = api.channels.find({ id: channel.id })
        if (!found) continue
        channel.logo = found.logo
        for (const d of dates) {
          const dString = d.toJSON()
          const key = `${channel.site}:${channel.lang}:${channel.id}:${dString}`
          if (!queue[key]) {
            queue[key] = {
              channel,
              date: dString,
              configPath,
              error: null
            }
          }
        }
      }
    } catch (err) {
      console.error(err)
      continue
    }
  }

  queue = Object.values(queue)

  logger.info(`Added ${queue.length} items`)

  return queue
}

async function saveToDatabase(items = []) {
  logger.info('Saving to the database...')

  await db.queue.load()
  await db.queue.reset()

  let queue = []

  const chunks = split(_.shuffle(items), options.maxClusters)
  for (const [i, chunk] of chunks.entries()) {
    for (const item of chunk) {
      item.cluster_id = i + 1
      queue.push(item)
    }
  }

  queue = _.sortBy(queue, ['channel.lang', 'channel.xmltv_id', 'date'])

  await db.queue.insert(queue)
}

function split(arr, n) {
  let result = []
  for (let i = n; i > 0; i--) {
    result.push(arr.splice(0, Math.ceil(arr.length / i)))
  }

  return result
}

@@ -1,86 +0,0 @@
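// Rebuilds the README guide tables (grouped by language) from the guides update log and recompiles readme.md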
const { file, markdown, parser, logger, api, table } = require('../../core')
const { program } = require('commander')
const langs = require('langs')
const _ = require('lodash')

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'

const options = program
  .option('-c, --config <config>', 'Set path to config file', '.readme/readme.json')
  .parse(process.argv)
  .opts()

async function main() {
  await api.countries.load().catch(console.error)

  const logPath = `${LOGS_DIR}/guides/update.log`
  let log = await parser.parseLogs(logPath)

  await createTable(log)

  await updateReadme()
}

main()

async function createTable(log) {
  let files = log.reduce((acc, curr) => {
    if (!acc[curr.filename]) {
      acc[curr.filename] = {
        site: curr.site,
        lang: curr.lang,
        channels: 0,
        filename: curr.filename
      }
    }
    acc[curr.filename].channels++

    return acc
  }, {})

  let groups = {}
  for (const filename in files) {
    const item = files[filename]
    const lang = langs.where('1', item.lang)
    if (!lang) continue

    if (!groups[lang.name]) groups[lang.name] = { lang: lang.name, data: [] }
    groups[lang.name].data.push([
      `<a href="https://${item.site}">${item.site}</a>`,
      item.channels,
      `<code>https://iptv-org.github.io/epg/guides/${filename}.xml</code>`,
      `<a href="https://github.com/iptv-org/epg/actions/workflows/${item.site}.yml"><img src="https://github.com/iptv-org/epg/actions/workflows/${item.site}.yml/badge.svg" alt="${item.site}" style="max-width: 100%;"></a>`
    ])
  }
  groups = _.sortBy(Object.values(groups), 'lang')

  let guides = ''
  for (let group of groups) {
    let lang = group.lang
    let data = group.data

    data = _.orderBy(data, [item => item[0], item => item[1]], ['asc', 'desc'])
    data = Object.values(_.groupBy(data, item => item[0]))

    guides += `### ${lang}\r\n\r\n`
    guides += table.create(data, [
      'Site&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;',
      'Channels',
      'EPG&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;',
      'Status&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;'
    ])
    guides += `\r\n\r\n`
  }

  await file.create('./.readme/_guides.md', guides)
}

async function updateReadme() {
  logger.info('updating readme.md...')

  const config = require(file.resolve(options.config))
  await file.createDir(file.dirname(config.build))
  await markdown.compile(options.config)
}