Aleksandr Statciuk 2022-01-09 18:15:38 +03:00
parent d068f5083f
commit 557a05ddac
13 changed files with 3046 additions and 142057 deletions


@@ -46,12 +46,12 @@ async function loadChannels() {
 async function saveToDatabase() {
   logger.info('Saving to the database...')
-  await db.reset()
+  await db.channels.reset()
   const chunks = split(_.shuffle(channels), options.maxClusters)
   for (const [i, chunk] of chunks.entries()) {
     for (const item of chunk) {
       item.cluster_id = i + 1
-      await db.insert(item)
+      await db.channels.insert(item)
     }
   }
 }
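For context: the loop above hands out 1-based cluster IDs chunk by chunk, so every channel record lands in exactly one cluster. The split helper itself is not shown in this diff, so the following is only a sketch of its assumed behavior, inferred from how it is called:

    const _ = require('lodash')

    // Assumed behavior of split(arr, n): partition an array into at most n
    // roughly equal chunks. lodash's chunk() takes a chunk *size*, so the
    // size is derived from the requested cluster count.
    function split(arr, n) {
      return _.chunk(arr, Math.ceil(arr.length / n))
    }

    // Channels in chunk i receive cluster_id = i + 1, which is what
    // load-cluster.js later queries via db.channels.find({ cluster_id: ... }).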


@@ -1,115 +0,0 @@
-const { db, logger, file, parser } = require('../core')
-const _ = require('lodash')
-
-const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
-const OUTPUT_PATH = process.env.OUTPUT_PATH || 'scripts/output'
-
-let channels = []
-let programs = []
-
-async function main() {
-  await setUp()
-  await createChannelsJson()
-  await createProgramsJson()
-}
-
-main()
-
-async function createChannelsJson() {
-  logger.info('Creating channels.json...')
-
-  let items = channels
-  items = _.sortBy(items, item => item.name)
-
-  let buffer = {}
-  items.forEach(item => {
-    if (!buffer[item.xmltv_id]) {
-      const countryCode = item.xmltv_id.split('.')[1]
-      buffer[item.xmltv_id] = {
-        id: item.xmltv_id,
-        name: [item.name],
-        logo: item.logo || null,
-        country: countryCode ? countryCode.toUpperCase() : null
-      }
-    } else {
-      if (!buffer[item.xmltv_id].logo && item.logo) {
-        buffer[item.xmltv_id].logo = item.logo
-      }
-
-      if (!buffer[item.xmltv_id].name.includes(item.name)) {
-        buffer[item.xmltv_id].name.push(item.name)
-      }
-    }
-  })
-
-  items = Object.values(buffer)
-
-  await file.create(`${OUTPUT_PATH}/channels.json`, JSON.stringify(items, null, 2))
-}
-
-async function createProgramsJson() {
-  logger.info('Creating programs.json...')
-
-  let items = programs
-  items = _.sortBy(items, ['channel', 'start'])
-  items = _.groupBy(items, 'channel')
-
-  for (let channel in items) {
-    let programs = items[channel]
-    programs = Object.values(_.groupBy(programs, i => i.site))[0]
-
-    let slots = _.groupBy(programs, i => `${i.start}_${i.stop}`)
-    for (let slotId in slots) {
-      let program = {
-        channel,
-        site: null,
-        title: [],
-        description: [],
-        categories: [],
-        icons: [],
-        start: null,
-        stop: null
-      }
-
-      slots[slotId].forEach(item => {
-        program.site = item.site
-        if (item.title) program.title.push({ lang: item.lang, value: item.title })
-        if (item.description)
-          program.description.push({
-            lang: item.lang,
-            value: item.description
-          })
-        if (item.category) program.categories.push({ lang: item.lang, value: item.category })
-        if (item.icon) program.icons.push(item.icon)
-        program.start = item.start
-        program.stop = item.stop
-      })
-
-      slots[slotId] = program
-    }
-
-    items[channel] = Object.values(slots)
-  }
-
-  // console.log(items)
-  await file.create(`${OUTPUT_PATH}/programs.json`, JSON.stringify(items, null, 2))
-}
-
-async function setUp() {
-  channels = await db.find({})
-
-  const files = await file.list(`${LOGS_PATH}/load-cluster/cluster_*.log`)
-  for (const filepath of files) {
-    const results = await parser.parseLogs(filepath)
-    results.forEach(result => {
-      let pm = result.programs.map(p => {
-        p.site = result.site
-        return p
-      })
-      programs = programs.concat(pm)
-    })
-  }
-}


@@ -17,6 +17,88 @@ async function main() {
 main()
 
+async function createChannelsJson() {
+  logger.info('Creating channels.json...')
+
+  let items = channels
+  items = _.sortBy(items, item => item.name)
+
+  let buffer = {}
+  items.forEach(item => {
+    if (!buffer[item.xmltv_id]) {
+      const countryCode = item.xmltv_id.split('.')[1]
+      buffer[item.xmltv_id] = {
+        id: item.xmltv_id,
+        name: [item.name],
+        logo: item.logo || null,
+        country: countryCode ? countryCode.toUpperCase() : null
+      }
+    } else {
+      if (!buffer[item.xmltv_id].logo && item.logo) {
+        buffer[item.xmltv_id].logo = item.logo
+      }
+
+      if (!buffer[item.xmltv_id].name.includes(item.name)) {
+        buffer[item.xmltv_id].name.push(item.name)
+      }
+    }
+  })
+
+  items = Object.values(buffer)
+
+  await file.create(`${OUTPUT_PATH}/channels.json`, JSON.stringify(items, null, 2))
+}
+
+async function createProgramsJson() {
+  logger.info('Creating programs.json...')
+
+  let items = programs
+  items = _.sortBy(items, ['channel', 'start'])
+  items = _.groupBy(items, 'channel')
+
+  for (let channel in items) {
+    let programs = items[channel]
+    programs = Object.values(_.groupBy(programs, i => i.site))[0]
+
+    let slots = _.groupBy(programs, i => `${i.start}_${i.stop}`)
+    for (let slotId in slots) {
+      let program = {
+        channel,
+        site: null,
+        title: [],
+        description: [],
+        categories: [],
+        icons: [],
+        start: null,
+        stop: null
+      }
+
+      slots[slotId].forEach(item => {
+        program.site = item.site
+        if (item.title) program.title.push({ lang: item.lang, value: item.title })
+        if (item.description)
+          program.description.push({
+            lang: item.lang,
+            value: item.description
+          })
+        if (item.category) program.categories.push({ lang: item.lang, value: item.category })
+        if (item.icon) program.icons.push(item.icon)
+        program.start = item.start
+        program.stop = item.stop
+      })
+
+      slots[slotId] = program
+    }
+
+    items[channel] = Object.values(slots)
+  }
+
+  // console.log(items)
+  await file.create(`${OUTPUT_PATH}/programs.json`, JSON.stringify(items, null, 2))
+}
+
 async function generateGuideXML() {
   logger.info(`Generating guide.xml...`)


@@ -18,7 +18,7 @@ async function main() {
   logger.info(`Loading cluster: ${options.clusterId}`)
   logger.info(`Creating '${clusterLog}'...`)
   await file.create(clusterLog)
-  const items = await db.find({ cluster_id: options.clusterId })
+  const items = await db.channels.find({ cluster_id: options.clusterId })
   const days = options.days || 1
   const total = days * items.length
   logger.info(`Total ${total} requests`)


@@ -0,0 +1,22 @@
+const { db, logger, file, parser } = require('../core')
+const _ = require('lodash')
+
+const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
+
+async function main() {
+  db.programs.reset()
+  const files = await file.list(`${LOGS_PATH}/load-cluster/cluster_*.log`)
+  for (const filepath of files) {
+    const results = await parser.parseLogs(filepath)
+    results.forEach(result => {
+      const programs = result.programs.map(p => {
+        p.site = result.site
+        return p
+      })
+      db.programs.insert(programs)
+    })
+  }
+  db.programs.compact()
+}
+
+main()
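This new command only assumes that parser.parseLogs resolves to an array of { site, programs } entries; nothing else about the log format is visible in this diff. A sketch of that assumed shape, with the program fields taken from the test fixtures further down and the site value purely illustrative:

    // Assumed resolve value of parser.parseLogs(filepath), inferred from
    // the usage above. Only `site` and `programs` are certain; the site
    // value here is a placeholder.
    const exampleResults = [
      {
        site: 'example.com',
        programs: [
          {
            title: 'InfoNeu',
            start: '2022-01-06T07:00:00.000Z',
            stop: '2022-01-06T08:00:00.000Z',
            channel: 'AndorraTV.ad',
            lang: 'ca'
          }
        ]
      }
    ]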


@@ -1,61 +1,69 @@
-const Database = require('nedb-promises')
+const nedb = require('nedb-promises')
 const file = require('./file')
 
-const DB_FILEPATH = process.env.DB_FILEPATH || './scripts/channels.db'
+const DB_DIR = process.env.DB_DIR || './scripts/database'
 
-const nedb = Database.create({
-  filename: file.resolve(DB_FILEPATH),
-  autoload: true,
-  onload(err) {
-    if (err) console.error(err)
-  },
-  compareStrings: (a, b) => {
-    a = a.replace(/\s/g, '_')
-    b = b.replace(/\s/g, '_')
-
-    return a.localeCompare(b, undefined, {
-      sensitivity: 'accent',
-      numeric: true
-    })
-  }
-})
+class Database {
+  constructor(filepath) {
+    this.filepath = filepath
+    this.db = nedb.create({
+      filename: file.resolve(filepath),
+      autoload: true,
+      onload: err => {
+        if (err) console.error(err)
+      },
+      compareStrings: (a, b) => {
+        a = a.replace(/\s/g, '_')
+        b = b.replace(/\s/g, '_')
+
+        return a.localeCompare(b, undefined, {
+          sensitivity: 'accent',
+          numeric: true
+        })
+      }
+    })
+  }
+
+  removeIndex(field) {
+    return this.db.removeIndex(field)
+  }
+
+  addIndex(options) {
+    return this.db.ensureIndex(options)
+  }
+
+  compact() {
+    return this.db.persistence.compactDatafile()
+  }
+
+  reset() {
+    return file.clear(this.filepath)
+  }
+
+  count(query) {
+    return this.db.count(query)
+  }
+
+  insert(doc) {
+    return this.db.insert(doc)
+  }
+
+  update(query, update) {
+    return this.db.update(query, update)
+  }
+
+  find(query) {
+    return this.db.find(query)
+  }
+
+  remove(query, options) {
+    return this.db.remove(query, options)
+  }
+}
 
 const db = {}
 
-db.removeIndex = function (field) {
-  return nedb.removeIndex(field)
-}
-
-db.addIndex = function (options) {
-  return nedb.ensureIndex(options)
-}
-
-db.compact = function () {
-  return nedb.persistence.compactDatafile()
-}
-
-db.reset = function () {
-  return file.clear(DB_FILEPATH)
-}
-
-db.count = function (query) {
-  return nedb.count(query)
-}
-
-db.insert = function (doc) {
-  return nedb.insert(doc)
-}
-
-db.update = function (query, update) {
-  return nedb.update(query, update)
-}
-
-db.find = function (query) {
-  return nedb.find(query)
-}
-
-db.remove = function (query, options) {
-  return nedb.remove(query, options)
-}
+db.channels = new Database(`${DB_DIR}/channels.db`)
+db.programs = new Database(`${DB_DIR}/programs.db`)
 
 module.exports = db
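The practical effect of the refactor above: instead of one process-wide NeDB handle behind DB_FILEPATH, core/db now exposes one Database instance per collection under DB_DIR. A minimal usage sketch of the refactored module, assuming it is run from the repo root and that scripts/core re-exports db as the commands here do; values are illustrative:

    const { db } = require('./scripts/core')

    async function example() {
      await db.channels.reset() // clears database/channels.db via file.clear()
      await db.channels.insert({ xmltv_id: 'AndorraTV.ad', cluster_id: 1 })

      const items = await db.channels.find({ cluster_id: 1 })
      console.log(items.length) // 1

      // Rewrite the datafile so it holds one line per live document
      // (nedb's persistence.compactDatafile under the hood).
      await db.channels.compact()
    }

    example()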

File diff suppressed because it is too large

scripts/database/programs.db (new file, 2875 lines)
File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large
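These suppressed diffs are the checked-in NeDB datafiles themselves. NeDB persists one JSON document per line and appends modifications rather than rewriting in place, which is why the .db files dominate this commit's line counts and why compact() is worth calling: it folds the file back to one line per live document. The tests below rely on exactly this format when they split the file on newlines and JSON.parse individual lines. An illustrative datafile line (the id and channel values are borrowed from the test fixtures; the rest is made up):

    {"name":"Andorra TV","xmltv_id":"AndorraTV.ad","cluster_id":1,"_id":"K1kaxwsWVjsRIZL6"}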


@@ -7,13 +7,13 @@ beforeEach(() => {
   fs.mkdirSync('tests/__data__/output')
 })
 
-it('can create database', () => {
+it('can create channels database', () => {
   const results = execSync(
-    'DB_FILEPATH=tests/__data__/output/test.db node scripts/commands/create-database.js --channels=tests/__data__/input/site.channels.xml --max-clusters=1',
+    'DB_DIR=tests/__data__/output/database node scripts/commands/create-database.js --channels=tests/__data__/input/site.channels.xml --max-clusters=1',
     { encoding: 'utf8' }
   )
-  const database = fs.readFileSync(path.resolve('tests/__data__/output/test.db'), {
+  const database = fs.readFileSync(path.resolve('tests/__data__/output/database/channels.db'), {
     encoding: 'utf8'
   })
   const item = database.split('\n').find(i => i.includes('AndorraTV.ad'))


@@ -5,7 +5,7 @@ const { execSync } = require('child_process')
 beforeEach(() => {
   fs.rmdirSync('tests/__data__/output', { recursive: true })
   fs.mkdirSync('tests/__data__/output')
-  fs.copyFileSync('tests/__data__/input/test.db', 'tests/__data__/temp/test.db')
+  fs.copyFileSync('tests/__data__/input/channels.db', 'tests/__data__/temp/channels.db')
 })
 
 afterEach(() => {
@@ -15,7 +15,7 @@ afterEach(() => {
 it('can load cluster', () => {
   const result = execSync(
-    'DB_FILEPATH=tests/__data__/temp/test.db LOGS_PATH=tests/__data__/output/logs node scripts/commands/load-cluster.js --cluster-id=1',
+    'DB_DIR=tests/__data__/temp LOGS_PATH=tests/__data__/output/logs node scripts/commands/load-cluster.js --cluster-id=1',
     { encoding: 'utf8' }
   )
   const logs = fs.readFileSync(
@@ -26,11 +26,5 @@ it('can load cluster', () => {
   )
 
   const lines = logs.split('\n')
   const parsed = JSON.parse(lines[0])
-  expect(parsed['K1kaxwsWVjsRIZL6'][0]).toMatchObject({
-    title: 'InfoNeu ',
-    start: '2022-01-06T07:00:00.000Z',
-    stop: '2022-01-06T08:00:00.000Z',
-    channel: 'AndorraTV.ad',
-    lang: 'ca'
-  })
+  expect(parsed._id).toBe('K1kaxwsWVjsRIZL6')
 })