Update scripts

freearhey 2025-04-29 00:18:35 +03:00
parent 37b4197fb2
commit 6244ba7adb
54 changed files with 2020 additions and 1145 deletions


@@ -0,0 +1,23 @@
import { DATA_DIR, API_DIR } from '../../constants'
import { Storage, File } from '@freearhey/core'
import { CSVParser } from '../../core'
import { CSVParserRow } from '../../types/csvParser'
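// Converts each CSV file in DATA_DIR into a JSON file of the same name in API_DIR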
async function main() {
const dataStorage = new Storage(DATA_DIR)
const apiStorage = new Storage(API_DIR)
const parser = new CSVParser()
const files = await dataStorage.list('*.csv')
for (const filepath of files) {
const file = new File(filepath)
const filename = file.name()
const data = await dataStorage.load(file.basename())
const parsed = await parser.parse(data)
const items = parsed.map((row: CSVParserRow) => row.data)
await apiStorage.save(`${filename}.json`, items.toJSON())
}
}
main()


@@ -0,0 +1,411 @@
import { CSV, IssueLoader, CSVParser, Issue, IssueData } from '../../core'
import { createChannelId, createFeedId } from '../../utils'
import { Channel, Feed, BlocklistRecord } from '../../models'
import { Storage, Collection, Logger } from '@freearhey/core'
import { DATA_DIR } from '../../constants'
import { DataLoader } from '../../core/dataLoader'
import { DataLoaderData } from '../../types/dataLoader'
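// Applies approved issue requests (add/edit/remove for channels, feeds and blocklist records)
// to the CSV data, saves the result, and prints a "closes #N" list for the processed issues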
const processedIssues = new Collection()
const dataStorage = new Storage(DATA_DIR)
const logger = new Logger({ level: -999 })
async function main() {
const parser = new CSVParser()
const issueLoader = new IssueLoader()
const dataLoader = new DataLoader({ storage: dataStorage })
logger.info('loading issues...')
const issues = await issueLoader.load()
logger.info('loading data...')
const data = await dataLoader.load()
logger.info('processing issues...')
await removeFeeds(issues, data)
await removeChannels(issues, data)
await editFeeds(issues, data)
await editChannels(issues, data)
await addFeeds(issues, data)
await addChannels(issues, data)
await blockChannels(issues, data)
await unblockChannels(issues, data)
logger.info('saving data...')
await save(data)
const output = processedIssues.map((issue: Issue) => `closes #${issue.number}`).join(', ')
process.stdout.write(`OUTPUT=${output}`)
}
main()
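// Sorts each collection and writes it back to channels.csv, feeds.csv and blocklist.csv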
async function save(data: DataLoaderData) {
const channels = data.channels
.sortBy((channel: Channel) => channel.id.toLowerCase())
.map((channel: Channel) => channel.data())
const channelsOutput = new CSV({ items: channels }).toString()
await dataStorage.save('channels.csv', channelsOutput)
const feeds = data.feeds
.sortBy((feed: Feed) => `${feed.getStreamId()}`.toLowerCase())
.map((feed: Feed) => feed.data())
const feedsOutput = new CSV({ items: feeds }).toString()
await dataStorage.save('feeds.csv', feedsOutput)
const blocklistRecords = data.blocklistRecords
.sortBy((blocklistRecord: BlocklistRecord) => blocklistRecord.channelId.toLowerCase())
.map((blocklistRecord: BlocklistRecord) => blocklistRecord.data())
const blocklistOutput = new CSV({ items: blocklistRecords }).toString()
await dataStorage.save('blocklist.csv', blocklistOutput)
}
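// Removes feeds requested via approved "feeds:remove" issues and clears references to them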
async function removeFeeds(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('feeds:remove') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (issueData.missing('channel_id') || issueData.missing('feed_id')) return
const found: Feed = data.feeds.first(
(feed: Feed) =>
feed.channelId === issueData.getString('channel_id') &&
feed.id === issueData.getString('feed_id')
)
if (!found) return
data.feeds.remove((feed: Feed) => feed.channelId === found.channelId && feed.id === found.id)
onFeedRemoval(found.channelId, found.id, data)
processedIssues.push(issue)
})
}
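// Applies approved "feeds:edit" issues: regenerates the feed id from the new name and,
// if requested, makes the feed the channel's main feed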
async function editFeeds(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('feeds:edit') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (issueData.missing('channel_id') || issueData.missing('feed_id')) return
const found: Feed = data.feeds.first(
(feed: Feed) =>
feed.channelId === issueData.getString('channel_id') &&
feed.id === issueData.getString('feed_id')
)
if (!found) return
let channelId: string | undefined = found.channelId
let feedId: string | undefined = found.id
if (issueData.has('feed_name')) {
const name = issueData.getString('feed_name') || found.name
if (name) {
feedId = createFeedId(name)
if (feedId) onFeedIdChange(found.channelId, found.id, feedId, data)
}
}
if (issueData.has('is_main')) {
const isMain = issueData.getBoolean('is_main') || false
if (isMain) onFeedNewMain(channelId, feedId, data)
}
if (!feedId || !channelId) return
found.update(issueData).setId(feedId)
processedIssues.push(issue)
})
}
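// Adds feeds requested via approved "feeds:add" issues, demoting the channel's previous
// main feed when the new one is marked as main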
async function addFeeds(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('feeds:add') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (
issueData.missing('channel_id') ||
issueData.missing('feed_name') ||
issueData.missing('is_main') ||
issueData.missing('broadcast_area') ||
issueData.missing('timezones') ||
issueData.missing('languages') ||
issueData.missing('video_format')
)
return
const channelId = issueData.getString('channel_id')
const feedName = issueData.getString('feed_name') || 'SD'
const feedId = createFeedId(feedName)
if (!channelId || !feedId) return
const found: Feed = data.feeds.first(
(feed: Feed) => feed.channelId === channelId && feed.id === feedId
)
if (found) return
const isMain = issueData.getBoolean('is_main') || false
if (isMain) onFeedNewMain(channelId, feedId, data)
const newFeed = new Feed({
channel: channelId,
id: feedId,
name: feedName,
is_main: issueData.getBoolean('is_main') || false,
broadcast_area: issueData.getArray('broadcast_area') || [],
timezones: issueData.getArray('timezones') || [],
languages: issueData.getArray('languages') || [],
video_format: issueData.getString('video_format')
})
data.feeds.add(newFeed)
processedIssues.push(issue)
})
}
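// Removes channels requested via approved "channels:remove" issues along with their feeds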
async function removeChannels(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('channels:remove') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (issueData.missing('channel_id')) return
const found = data.channels.first(
(channel: Channel) => channel.id === issueData.getString('channel_id')
)
if (!found) return
data.channels.remove((channel: Channel) => channel.id === found.id)
onChannelRemoval(found.id, data)
processedIssues.push(issue)
})
}
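// Applies approved "channels:edit" issues: regenerates the channel id when the name or
// country changes and updates all references to it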
async function editChannels(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('channels:edit') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (issueData.missing('channel_id')) return
const found: Channel = data.channels.first(
(channel: Channel) => channel.id === issueData.getString('channel_id')
)
if (!found) return
let channelId: string | undefined = found.id
if (issueData.has('channel_name') || issueData.has('country')) {
const name = issueData.getString('channel_name') || found.name
const country = issueData.getString('country') || found.countryCode
if (name && country) {
channelId = createChannelId(name, country)
if (channelId) onChannelIdChange(found.id, channelId, data)
}
}
if (!channelId) return
found.update(issueData).setId(channelId)
processedIssues.push(issue)
})
}
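// Adds channels requested via approved "channels:add" issues together with an initial main feed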
async function addChannels(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('channels:add') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (
issueData.missing('channel_name') ||
issueData.missing('country') ||
issueData.missing('is_nsfw') ||
issueData.missing('logo') ||
issueData.missing('feed_name') ||
issueData.missing('broadcast_area') ||
issueData.missing('timezones') ||
issueData.missing('languages') ||
issueData.missing('video_format')
)
return
const channelId = createChannelId(
issueData.getString('channel_name'),
issueData.getString('country')
)
if (!channelId) return
const found: Channel = data.channels.first((channel: Channel) => channel.id === channelId)
if (found) return
const newChannel = new Channel({
id: channelId,
name: issueData.getString('channel_name') || '',
alt_names: issueData.getArray('alt_names'),
network: issueData.getString('network'),
owners: issueData.getArray('owners'),
country: issueData.getString('country') || '',
subdivision: issueData.getString('subdivision'),
city: issueData.getString('city'),
categories: issueData.getArray('categories'),
is_nsfw: issueData.getBoolean('is_nsfw') || false,
launched: issueData.getString('launched'),
closed: issueData.getString('closed'),
replaced_by: issueData.getString('replaced_by'),
website: issueData.getString('website'),
logo: issueData.getString('logo') || ''
})
data.channels.add(newChannel)
const feedName = issueData.getString('feed_name') || 'SD'
const newFeed = new Feed({
channel: channelId,
id: createFeedId(feedName),
name: feedName,
is_main: true,
broadcast_area: issueData.getArray('broadcast_area') || [],
timezones: issueData.getArray('timezones') || [],
languages: issueData.getArray('languages') || [],
video_format: issueData.getString('video_format')
})
data.feeds.add(newFeed)
processedIssues.push(issue)
})
}
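// Removes blocklist records requested via approved "blocklist:remove" issues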
async function unblockChannels(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('blocklist:remove') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (issueData.missing('channel_id')) return
const found: BlocklistRecord = data.blocklistRecords.first(
(blocklistRecord: BlocklistRecord) =>
blocklistRecord.channelId === issueData.getString('channel_id')
)
if (!found) return
data.blocklistRecords.remove(
(blocklistRecord: BlocklistRecord) => blocklistRecord.channelId === found.channelId
)
processedIssues.push(issue)
})
}
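// Adds blocklist records requested via approved "blocklist:add" issues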
async function blockChannels(issues: Collection, data: DataLoaderData) {
const requests = issues.filter(
issue => issue.labels.includes('blocklist:add') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const issueData: IssueData = issue.data
if (issueData.missing('channel_id')) return
const found: BlocklistRecord = data.blocklistRecords.first(
(blocklistRecord: BlocklistRecord) =>
blocklistRecord.channelId === issueData.getString('channel_id')
)
if (found) return
const channel = issueData.getString('channel_id')
const reason = issueData.getString('reason')?.toLowerCase()
const ref = issueData.getString('ref')
if (!channel || !reason || !ref) return
const newBlocklistRecord = new BlocklistRecord({
channel,
reason,
ref
})
data.blocklistRecords.add(newBlocklistRecord)
processedIssues.push(issue)
})
}
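// Updates "replacedBy" references that point to the renamed feed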
function onFeedIdChange(
channelId: string,
feedId: string,
newFeedId: string,
data: DataLoaderData
) {
data.channels.forEach((channel: Channel) => {
if (channel.replacedBy && channel.replacedBy === `${channelId}@${feedId}`) {
channel.replacedBy = `${channelId}@${newFeedId}`
}
})
}
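// Clears the isMain flag on the channel's other feeds so only one main feed remains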
function onFeedNewMain(channelId: string, feedId: string, data: DataLoaderData) {
data.feeds.forEach((feed: Feed) => {
if (feed.channelId === channelId && feed.id !== feedId && feed.isMain === true) {
feed.isMain = false
}
})
}
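// Clears "replacedBy" references that pointed to the removed feed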
function onFeedRemoval(channelId: string, feedId: string, data: DataLoaderData) {
data.channels.forEach((channel: Channel) => {
if (channel.replacedBy && channel.replacedBy === `${channelId}@${feedId}`) {
channel.replacedBy = ''
}
})
}
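// Rewrites the old channel id in "replacedBy" references, feeds and blocklist records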
function onChannelIdChange(channelId: string, newChannelId: string, data: DataLoaderData) {
data.channels.forEach((channel: Channel) => {
if (channel.replacedBy && channel.replacedBy.includes(channelId)) {
channel.replacedBy = channel.replacedBy.replace(channelId, newChannelId)
}
})
data.feeds.forEach((feed: Feed) => {
if (feed.channelId === channelId) {
feed.channelId = newChannelId
}
})
data.blocklistRecords.forEach((blocklistRecord: BlocklistRecord) => {
if (blocklistRecord.channelId === channelId) {
blocklistRecord.channelId = newChannelId
}
})
}
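// Clears "replacedBy" references to the removed channel and deletes its feeds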
function onChannelRemoval(channelId: string, data: DataLoaderData) {
data.channels.forEach((channel: Channel) => {
if (channel.replacedBy && channel.replacedBy.includes(channelId)) {
channel.replacedBy = ''
}
})
data.feeds.remove((feed: Feed) => feed.channelId === channelId)
}


@@ -0,0 +1,260 @@
import { Collection, Storage, Dictionary } from '@freearhey/core'
import { DataLoaderData } from '../../types/dataLoader'
import { ValidatorError } from '../../types/validator'
import { DataLoader } from '../../core/dataLoader'
import { DATA_DIR } from '../../constants'
import chalk from 'chalk'
import {
BlocklistRecord,
Subdivision,
Category,
Language,
Timezone,
Channel,
Country,
Region,
Feed
} from '../../models'
import {
BlocklistRecordValidator,
SubdivisionValidator,
CategoryValidator,
LanguageValidator,
TimezoneValidator,
ChannelValidator,
CountryValidator,
RegionValidator,
FeedValidator
} from '../../validators'
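// Validates every data CSV file: reports duplicate entries and per-record validation
// errors, and exits with code 1 if any are found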
let totalErrors = 0
async function main() {
const dataStorage = new Storage(DATA_DIR)
const dataLoader = new DataLoader({ storage: dataStorage })
const data = await dataLoader.load()
validateChannels(data)
validateFeeds(data)
validateRegions(data)
validateBlocklist(data)
validateCategories(data)
validateCountries(data)
validateSubdivisions(data)
validateLanguages(data)
validateTimezones(data)
if (totalErrors > 0) {
console.log(chalk.red(`\r\n${totalErrors} error(s)`))
process.exit(1)
}
}
main()
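// Each validate* function below follows the same pattern: flag duplicate records,
// run the matching validator over every record, then report and count the errors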
function validateChannels(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.channels, ['id']).forEach((channel: Channel) => {
errors.add({
line: channel.getLine(),
message: `channel with id "${channel.id}" already exists`
})
})
const validator = new ChannelValidator({ data })
data.channels.forEach((channel: Channel) => {
errors = errors.concat(validator.validate(channel))
})
if (errors.count()) displayErrors('channels.csv', errors)
totalErrors += errors.count()
}
function validateFeeds(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.feeds, ['channelId', 'id']).forEach((feed: Feed) => {
errors.add({
line: feed.getLine(),
message: `feed with channel "${feed.channelId}" and id "${feed.id}" already exists`
})
})
const validator = new FeedValidator({ data })
data.feeds.forEach((feed: Feed) => {
errors = errors.concat(validator.validate(feed))
})
if (errors.count()) displayErrors('feeds.csv', errors)
totalErrors += errors.count()
}
function validateRegions(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.regions, ['code']).forEach((region: Region) => {
errors.add({
line: region.getLine(),
message: `region with code "${region.code}" already exists`
})
})
const validator = new RegionValidator({ data })
data.regions.forEach((region: Region) => {
errors = errors.concat(validator.validate(region))
})
if (errors.count()) displayErrors('regions.csv', errors)
totalErrors += errors.count()
}
function validateBlocklist(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.blocklistRecords, ['channelId', 'ref']).forEach(
(blocklistRecord: BlocklistRecord) => {
errors.add({
line: blocklistRecord.getLine(),
message: `blocklist record with channel "${blocklistRecord.channelId}" and ref "${blocklistRecord.ref}" already exists`
})
}
)
const validator = new BlocklistRecordValidator({ data })
data.blocklistRecords.forEach((blocklistRecord: BlocklistRecord) => {
errors = errors.concat(validator.validate(blocklistRecord))
})
if (errors.count()) displayErrors('blocklist.csv', errors)
totalErrors += errors.count()
}
function validateCategories(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.categories, ['id']).forEach((category: Category) => {
errors.add({
line: category.getLine(),
message: `category with id "${category.id}" already exists`
})
})
const validator = new CategoryValidator({ data })
data.categories.forEach((category: Category) => {
errors = errors.concat(validator.validate(category))
})
if (errors.count()) displayErrors('categories.csv', errors)
totalErrors += errors.count()
}
function validateCountries(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.countries, ['code']).forEach((country: Country) => {
errors.add({
line: country.getLine(),
message: `country with code "${country.code}" already exists`
})
})
const validator = new CountryValidator({ data })
data.countries.forEach((country: Country) => {
errors = errors.concat(validator.validate(country))
})
if (errors.count()) displayErrors('countries.csv', errors)
totalErrors += errors.count()
}
function validateSubdivisions(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.subdivisions, ['code']).forEach((subdivision: Subdivision) => {
errors.add({
line: subdivision.getLine(),
message: `subdivision with code "${subdivision.code}" already exists`
})
})
const validator = new SubdivisionValidator({ data })
data.subdivisions.forEach((subdivision: Subdivision) => {
errors = errors.concat(validator.validate(subdivision))
})
if (errors.count()) displayErrors('subdivisions.csv', errors)
totalErrors += errors.count()
}
function validateLanguages(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.languages, ['code']).forEach((language: Language) => {
errors.add({
line: language.getLine(),
message: `language with code "${language.code}" already exists`
})
})
const validator = new LanguageValidator({ data })
data.languages.forEach((language: Language) => {
errors = errors.concat(validator.validate(language))
})
if (errors.count()) displayErrors('languages.csv', errors)
totalErrors += errors.count()
}
function validateTimezones(data: DataLoaderData) {
let errors = new Collection()
findDuplicatesBy(data.timezones, ['id']).forEach((timezone: Timezone) => {
errors.add({
line: timezone.getLine(),
message: `timezone with id "${timezone.id}" already exists`
})
})
const validator = new TimezoneValidator({ data })
data.timezones.forEach((timezone: Timezone) => {
errors = errors.concat(validator.validate(timezone))
})
if (errors.count()) displayErrors('timezones.csv', errors)
totalErrors += errors.count()
}
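// Builds a lowercase composite key from the given fields and collects every item
// whose key has already been seen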
function findDuplicatesBy(items: Collection, keys: string[]) {
const duplicates = new Collection()
const buffer = new Dictionary()
items.forEach(item => {
const normId = keys.map(key => item[key].toString().toLowerCase()).join()
if (buffer.has(normId)) {
duplicates.add(item)
}
buffer.set(normId, true)
})
return duplicates
}
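// Prints the file name followed by each error's line number and message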
function displayErrors(filepath: string, errors: Collection) {
console.log(`\r\n${chalk.underline(filepath)}`)
errors.forEach((error: ValidatorError) => {
const position = error.line.toString().padEnd(6, ' ')
console.log(` ${chalk.gray(position) + error.message}`)
})
}