mirror of https://github.com/iptv-org/iptv.git
synced 2025-05-11 17:40:03 -04:00

commit df365451a9 (parent d095023da0)
Update scripts

39 changed files with 1256 additions and 508 deletions
@@ -1,30 +1,25 @@
-import { Logger, Storage, Collection } from '@freearhey/core'
+import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
+import type { DataProcessorData } from '../../types/dataProcessor'
 import { API_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
-import { PlaylistParser } from '../../core'
-import { Stream, Channel, Feed } from '../../models'
-import { uniqueId } from 'lodash'
+import type { DataLoaderData } from '../../types/dataLoader'
+import { Logger, Storage } from '@freearhey/core'
+import { Stream } from '../../models'
 
 async function main() {
   const logger = new Logger()
 
-  logger.info('loading api data...')
+  logger.info('loading data from api...')
+  const processor = new DataProcessor()
   const dataStorage = new Storage(DATA_DIR)
-  const channelsData = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsData).map(data => new Channel(data))
-  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
-  const feedsData = await dataStorage.json('feeds.json')
-  const feeds = new Collection(feedsData).map(data =>
-    new Feed(data).withChannel(channelsGroupedById)
-  )
-  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
-    feed.channel ? feed.channel.id : uniqueId()
-  )
+  const dataLoader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await dataLoader.load()
+  const { channelsKeyById, feedsGroupedByChannelId }: DataProcessorData = processor.process(data)
 
   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
   const parser = new PlaylistParser({
     storage: streamsStorage,
-    channelsGroupedById,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
   const files = await streamsStorage.list('**/*.m3u')
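Every file in this commit repeats the same refactor: hand-rolled loading of channels.json and feeds.json (plus the keyBy/groupBy indexing and the lodash uniqueId fallback) is replaced by a single DataLoader/DataProcessor pair that returns ready-made indexes such as channelsKeyById and feedsGroupedByChannelId. The snippet below is a self-contained sketch of what that indexing step does, using plain Maps as stand-ins; the real DataProcessor lives in scripts/core and its internals are not shown in this diff.

// Illustrative stand-ins only: the real Channel/Feed models and the
// keyBy/groupBy helpers come from @freearhey/core, not this sketch.
type Channel = { id: string; name: string }
type Feed = { id: string; channel: string }

function process(channels: Channel[], feeds: Feed[]) {
  // keyBy: exactly one entry per channel id
  const channelsKeyById = new Map(channels.map(c => [c.id, c] as const))

  // groupBy: many feeds may share one channel id
  const feedsGroupedByChannelId = new Map<string, Feed[]>()
  for (const feed of feeds) {
    const group = feedsGroupedByChannelId.get(feed.channel) ?? []
    group.push(feed)
    feedsGroupedByChannelId.set(feed.channel, group)
  }

  return { channelsKeyById, feedsGroupedByChannelId }
}

// Mirrors the new call sites: one prebuilt lookup table instead of repeated scans.
const { channelsKeyById } = process(
  [{ id: 'France3.fr', name: 'France 3' }],
  [{ id: 'SD', channel: 'France3.fr' }]
)
console.log(channelsKeyById.get('France3.fr')?.name) // "France 3"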
@@ -1,23 +1,24 @@
 import { Logger } from '@freearhey/core'
-import { ApiClient } from '../../core'
 import { DATA_DIR } from '../../constants'
+import { Storage } from '@freearhey/core'
+import { DataLoader } from '../../core'
 
 async function main() {
   const logger = new Logger()
-  const client = new ApiClient({ logger })
+  const storage = new Storage(DATA_DIR)
+  const loader = new DataLoader({ storage })
 
-  const requests = [
-    client.download('blocklist.json'),
-    client.download('categories.json'),
-    client.download('channels.json'),
-    client.download('countries.json'),
-    client.download('languages.json'),
-    client.download('regions.json'),
-    client.download('subdivisions.json'),
-    client.download('feeds.json'),
-    client.download('timezones.json')
-  ]
-
-  await Promise.all(requests)
+  await Promise.all([
+    loader.download('blocklist.json'),
+    loader.download('categories.json'),
+    loader.download('channels.json'),
+    loader.download('countries.json'),
+    loader.download('languages.json'),
+    loader.download('regions.json'),
+    loader.download('subdivisions.json'),
+    loader.download('feeds.json'),
+    loader.download('timezones.json'),
+    loader.download('guides.json'),
+    loader.download('streams.json')
+  ])
 }
 
 main()
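The rewritten loader adds guides.json and streams.json to the download list and swaps ApiClient for DataLoader; Promise.all still runs all eleven requests concurrently and fails the whole batch if any one of them rejects. Below is a minimal runnable sketch of that shape. The fetch-to-file body and the API base URL are assumptions standing in for the real loader.download(), whose implementation is not part of this diff.

import { writeFile } from 'node:fs/promises'

// Hypothetical stand-in for loader.download(); the base URL is assumed
// from where iptv-org publishes its API data.
async function download(filename: string): Promise<void> {
  const res = await fetch(`https://iptv-org.github.io/api/${filename}`)
  if (!res.ok) throw new Error(`${filename}: HTTP ${res.status}`)
  await writeFile(filename, Buffer.from(await res.arrayBuffer()))
}

async function main() {
  // All files are fetched concurrently; Promise.all rejects fast if any
  // single download fails, matching the all-or-nothing shape above.
  await Promise.all(['channels.json', 'feeds.json', 'guides.json', 'streams.json'].map(download))
}

main().catch(console.error)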
208  scripts/commands/playlist/edit.ts  Normal file
@@ -0,0 +1,208 @@
+import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
+import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
+import { Channel, Feed, Playlist, Stream } from '../../models'
+import type { ChannelSearchableData } from '../../types/channel'
+import { DataProcessorData } from '../../types/dataProcessor'
+import { DataLoaderData } from '../../types/dataLoader'
+import { select, input } from '@inquirer/prompts'
+import { DATA_DIR } from '../../constants'
+import nodeCleanup from 'node-cleanup'
+import sjs from '@freearhey/search-js'
+import { Command } from 'commander'
+import readline from 'readline'
+
+type ChoiceValue = { type: string; value?: Feed | Channel }
+type Choice = { name: string; short?: string; value: ChoiceValue; default?: boolean }
+
+if (process.platform === 'win32') {
+  readline
+    .createInterface({
+      input: process.stdin,
+      output: process.stdout
+    })
+    .on('SIGINT', function () {
+      process.emit('SIGINT')
+    })
+}
+
+const program = new Command()
+
+program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
+
+const filepath = program.args[0]
+const logger = new Logger()
+const storage = new Storage()
+let parsedStreams = new Collection()
+
+main(filepath)
+nodeCleanup(() => {
+  save(filepath)
+})
+
+export default async function main(filepath: string) {
+  if (!(await storage.exists(filepath))) {
+    throw new Error(`File "${filepath}" does not exists`)
+  }
+
+  logger.info('loading data from api...')
+  const processor = new DataProcessor()
+  const dataStorage = new Storage(DATA_DIR)
+  const loader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await loader.load()
+  const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
+    processor.process(data)
+
+  logger.info('loading streams...')
+  const parser = new PlaylistParser({ storage, feedsGroupedByChannelId, channelsKeyById })
+  parsedStreams = await parser.parseFile(filepath)
+  const streamsWithoutId = parsedStreams.filter((stream: Stream) => !stream.id)
+
+  logger.info(
+    `found ${parsedStreams.count()} streams (including ${streamsWithoutId.count()} without ID)`
+  )
+
+  logger.info('creating search index...')
+  const items = channels.map((channel: Channel) => channel.getSearchable()).all()
+  const searchIndex = sjs.createIndex(items, {
+    searchable: ['name', 'altNames', 'guideNames', 'streamNames', 'feedFullNames']
+  })
+
+  logger.info('starting...\n')
+
+  for (const stream of streamsWithoutId.all()) {
+    try {
+      stream.id = await selectChannel(stream, searchIndex, feedsGroupedByChannelId, channelsKeyById)
+    } catch (err) {
+      logger.info(err.message)
+      break
+    }
+  }
+
+  streamsWithoutId.forEach((stream: Stream) => {
+    if (stream.id === '-') {
+      stream.id = ''
+    }
+  })
+}
+
+async function selectChannel(
+  stream: Stream,
+  searchIndex,
+  feedsGroupedByChannelId: Dictionary,
+  channelsKeyById: Dictionary
+): Promise<string> {
+  const query = escapeRegex(stream.getName())
+  const similarChannels = searchIndex
+    .search(query)
+    .map((item: ChannelSearchableData) => channelsKeyById.get(item.id))
+
+  const url = stream.url.length > 50 ? stream.url.slice(0, 50) + '...' : stream.url
+
+  const selected: ChoiceValue = await select({
+    message: `Select channel ID for "${stream.name}" (${url}):`,
+    choices: getChannelChoises(new Collection(similarChannels)),
+    pageSize: 10
+  })
+
+  switch (selected.type) {
+    case 'skip':
+      return '-'
+    case 'type': {
+      const typedChannelId = await input({ message: ' Channel ID:' })
+      if (!typedChannelId) return ''
+      const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
+      if (selectedFeedId === '-') return typedChannelId
+      return [typedChannelId, selectedFeedId].join('@')
+    }
+    case 'channel': {
+      const selectedChannel = selected.value
+      if (!selectedChannel) return ''
+      const selectedFeedId = await selectFeed(selectedChannel.id, feedsGroupedByChannelId)
+      if (selectedFeedId === '-') return selectedChannel.id
+      return [selectedChannel.id, selectedFeedId].join('@')
+    }
+  }
+
+  return ''
+}
+
+async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
+  const channelFeeds = new Collection(feedsGroupedByChannelId.get(channelId)) || new Collection()
+  const choices = getFeedChoises(channelFeeds)
+
+  const selected: ChoiceValue = await select({
+    message: `Select feed ID for "${channelId}":`,
+    choices,
+    pageSize: 10
+  })
+
+  switch (selected.type) {
+    case 'skip':
+      return '-'
+    case 'type':
+      return await input({ message: ' Feed ID:', default: 'SD' })
+    case 'feed':
+      const selectedFeed = selected.value
+      if (!selectedFeed) return ''
+      return selectedFeed.id
+  }
+
+  return ''
+}
+
+function getChannelChoises(channels: Collection): Choice[] {
+  const choises: Choice[] = []
+
+  channels.forEach((channel: Channel) => {
+    const names = new Collection([channel.name, ...channel.altNames.all()]).uniq().join(', ')
+
+    choises.push({
+      value: {
+        type: 'channel',
+        value: channel
+      },
+      name: `${channel.id} (${names})`,
+      short: `${channel.id}`
+    })
+  })
+
+  choises.push({ name: 'Type...', value: { type: 'type' } })
+  choises.push({ name: 'Skip', value: { type: 'skip' } })
+
+  return choises
+}
+
+function getFeedChoises(feeds: Collection): Choice[] {
+  const choises: Choice[] = []
+
+  feeds.forEach((feed: Feed) => {
+    let name = `${feed.id} (${feed.name})`
+    if (feed.isMain) name += ' [main]'
+
+    choises.push({
+      value: {
+        type: 'feed',
+        value: feed
+      },
+      default: feed.isMain,
+      name,
+      short: feed.id
+    })
+  })
+
+  choises.push({ name: 'Type...', value: { type: 'type' } })
+  choises.push({ name: 'Skip', value: { type: 'skip' } })
+
+  return choises
+}
+
+function save(filepath: string) {
+  if (!storage.existsSync(filepath)) return
+  const playlist = new Playlist(parsedStreams)
+  storage.saveSync(filepath, playlist.toString())
+  logger.info(`\nFile '${filepath}' successfully saved`)
+}
+
+function escapeRegex(string: string) {
+  return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
+}
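One design note on the new edit command: "Skip" resolves to the sentinel '-' rather than '' so that a skipped stream still counts as answered inside the prompt loop, and only after the loop does main() normalize the sentinel back to an empty ID. A compressed, runnable model of that round-trip (the prompt is faked here so the snippet stands alone; the real script uses @inquirer/prompts):

// '-' marks "user skipped", '' marks "no ID assigned"; the sentinel is
// erased only after every stream has been handled.
type Stream = { name: string; id: string }

// Hypothetical prompt stand-in for the select()/input() calls above.
async function selectChannel(stream: Stream): Promise<string> {
  return stream.name === 'unknown' ? '-' : `${stream.name}.us@SD`
}

async function demo() {
  const streams: Stream[] = [
    { name: 'KQED', id: '' },
    { name: 'unknown', id: '' }
  ]

  for (const stream of streams) {
    stream.id = await selectChannel(stream) // '-' keeps the loop moving
  }

  for (const stream of streams) {
    if (stream.id === '-') stream.id = '' // normalize, as in main() above
  }

  console.log(streams)
  // [ { name: 'KQED', id: 'KQED.us@SD' }, { name: 'unknown', id: '' } ]
}

demo()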
@@ -1,33 +1,28 @@
-import { Logger, Storage, Collection } from '@freearhey/core'
+import { Logger, Storage } from '@freearhey/core'
 import { STREAMS_DIR, DATA_DIR } from '../../constants'
-import { PlaylistParser } from '../../core'
-import { Stream, Playlist, Channel, Feed } from '../../models'
+import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
+import { Stream, Playlist } from '../../models'
 import { program } from 'commander'
-import { uniqueId } from 'lodash'
+import { DataLoaderData } from '../../types/dataLoader'
+import { DataProcessorData } from '../../types/dataProcessor'
 
 program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
 
 async function main() {
-  const streamsStorage = new Storage(STREAMS_DIR)
   const logger = new Logger()
 
   logger.info('loading data from api...')
+  const processor = new DataProcessor()
   const dataStorage = new Storage(DATA_DIR)
-  const channelsData = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsData).map(data => new Channel(data))
-  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
-  const feedsData = await dataStorage.json('feeds.json')
-  const feeds = new Collection(feedsData).map(data =>
-    new Feed(data).withChannel(channelsGroupedById)
-  )
-  const feedsGroupedByChannelId = feeds.groupBy(feed =>
-    feed.channel ? feed.channel.id : uniqueId()
-  )
+  const loader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await loader.load()
+  const { channelsKeyById, feedsGroupedByChannelId }: DataProcessorData = processor.process(data)
 
   logger.info('loading streams...')
+  const streamsStorage = new Storage(STREAMS_DIR)
   const parser = new PlaylistParser({
     storage: streamsStorage,
-    channelsGroupedById,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
   const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
@@ -46,7 +41,7 @@ async function main() {
 
   logger.info('removing wrong id...')
   streams = streams.map((stream: Stream) => {
-    if (!stream.channel || channelsGroupedById.missing(stream.channel.id)) {
+    if (!stream.channel || channelsKeyById.missing(stream.channel.id)) {
       stream.id = ''
     }
@@ -1,16 +1,6 @@
-import { Logger, Storage, Collection } from '@freearhey/core'
-import { PlaylistParser } from '../../core'
-import {
-  Stream,
-  Category,
-  Channel,
-  Language,
-  Country,
-  Region,
-  Subdivision,
-  Feed,
-  Timezone
-} from '../../models'
+import { Logger, Storage } from '@freearhey/core'
+import { PlaylistParser, DataProcessor, DataLoader } from '../../core'
+import { Stream } from '../../models'
 import { uniqueId } from 'lodash'
 import {
   CategoriesGenerator,
@@ -24,86 +14,36 @@ import {
   IndexRegionGenerator
 } from '../../generators'
 import { DATA_DIR, LOGS_DIR, STREAMS_DIR } from '../../constants'
+import type { DataProcessorData } from '../../types/dataProcessor'
+import type { DataLoaderData } from '../../types/dataLoader'
 
 async function main() {
   const logger = new Logger()
-  const dataStorage = new Storage(DATA_DIR)
   const generatorsLogger = new Logger({
     stream: await new Storage(LOGS_DIR).createStream(`generators.log`)
   })
 
   logger.info('loading data from api...')
-  const categoriesData = await dataStorage.json('categories.json')
-  const countriesData = await dataStorage.json('countries.json')
-  const languagesData = await dataStorage.json('languages.json')
-  const regionsData = await dataStorage.json('regions.json')
-  const subdivisionsData = await dataStorage.json('subdivisions.json')
-  const timezonesData = await dataStorage.json('timezones.json')
-  const channelsData = await dataStorage.json('channels.json')
-  const feedsData = await dataStorage.json('feeds.json')
-
-  logger.info('preparing data...')
-  const subdivisions = new Collection(subdivisionsData).map(data => new Subdivision(data))
-  const subdivisionsGroupedByCode = subdivisions.keyBy(
-    (subdivision: Subdivision) => subdivision.code
-  )
-  const subdivisionsGroupedByCountryCode = subdivisions.groupBy(
-    (subdivision: Subdivision) => subdivision.countryCode
-  )
-  let regions = new Collection(regionsData).map(data =>
-    new Region(data).withSubdivisions(subdivisions)
-  )
-  const regionsGroupedByCode = regions.keyBy((region: Region) => region.code)
-  const categories = new Collection(categoriesData).map(data => new Category(data))
-  const categoriesGroupedById = categories.keyBy((category: Category) => category.id)
-  const languages = new Collection(languagesData).map(data => new Language(data))
-  const languagesGroupedByCode = languages.keyBy((language: Language) => language.code)
-  const countries = new Collection(countriesData).map(data =>
-    new Country(data)
-      .withRegions(regions)
-      .withLanguage(languagesGroupedByCode)
-      .withSubdivisions(subdivisionsGroupedByCountryCode)
-  )
-  const countriesGroupedByCode = countries.keyBy((country: Country) => country.code)
-  regions = regions.map((region: Region) => region.withCountries(countriesGroupedByCode))
-
-  const timezones = new Collection(timezonesData).map(data =>
-    new Timezone(data).withCountries(countriesGroupedByCode)
-  )
-  const timezonesGroupedById = timezones.keyBy((timezone: Timezone) => timezone.id)
-
-  const channels = new Collection(channelsData).map(data =>
-    new Channel(data)
-      .withCategories(categoriesGroupedById)
-      .withCountry(countriesGroupedByCode)
-      .withSubdivision(subdivisionsGroupedByCode)
-  )
-  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
-  const feeds = new Collection(feedsData).map(data =>
-    new Feed(data)
-      .withChannel(channelsGroupedById)
-      .withLanguages(languagesGroupedByCode)
-      .withTimezones(timezonesGroupedById)
-      .withBroadcastCountries(
-        countriesGroupedByCode,
-        regionsGroupedByCode,
-        subdivisionsGroupedByCode
-      )
-      .withBroadcastRegions(regions)
-      .withBroadcastSubdivisions(subdivisionsGroupedByCode)
-  )
-  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
-    feed.channel ? feed.channel.id : uniqueId()
-  )
+  const processor = new DataProcessor()
+  const dataStorage = new Storage(DATA_DIR)
+  const loader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await loader.load()
+  const {
+    categories,
+    countries,
+    regions,
+    channelsKeyById,
+    feedsGroupedByChannelId
+  }: DataProcessorData = processor.process(data)
 
   logger.info('loading streams...')
-  const storage = new Storage(STREAMS_DIR)
+  const streamsStorage = new Storage(STREAMS_DIR)
   const parser = new PlaylistParser({
-    storage,
-    channelsGroupedById,
+    storage: streamsStorage,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
-  const files = await storage.list('**/*.m3u')
+  const files = await streamsStorage.list('**/*.m3u')
   let streams = await parser.parse(files)
   const totalStreams = streams.count()
   streams = streams.uniqBy((stream: Stream) =>
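Taken together, the destructuring sites in this commit show which prebuilt indexes DataProcessor.process() returns. The type below is a reconstruction from those call sites only, not the actual declaration in scripts/types/dataProcessor, which may contain more fields:

// Approximate shape of DataProcessorData, inferred from the destructuring
// patterns in this diff; field types and completeness are assumptions.
import type { Collection, Dictionary } from '@freearhey/core'

type DataProcessorData = {
  categories: Collection      // used by playlist/generate.ts
  countries: Collection       // used by playlist/generate.ts
  regions: Collection         // used by playlist/generate.ts
  channels: Collection        // used by playlist/edit.ts
  channelsKeyById: Dictionary // used by every script in this diff
  feedsGroupedByChannelId: Dictionary
  blocklistRecordsGroupedByChannelId: Dictionary // validate/report scripts
}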
@@ -1,13 +1,15 @@
 import { Logger, Storage, Collection } from '@freearhey/core'
 import { ROOT_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
-import { PlaylistParser, StreamTester, CliTable } from '../../core'
-import { Stream, Feed, Channel } from '../../models'
+import { PlaylistParser, StreamTester, CliTable, DataProcessor, DataLoader } from '../../core'
+import { Stream } from '../../models'
 import { program } from 'commander'
 import { eachLimit } from 'async-es'
 import commandExists from 'command-exists'
 import chalk from 'chalk'
 import os from 'node:os'
 import dns from 'node:dns'
+import type { DataLoaderData } from '../../types/dataLoader'
+import type { DataProcessorData } from '../../types/dataProcessor'
 
 const cpus = os.cpus()
@@ -54,22 +56,18 @@ async function main() {
     return
   }
 
-  logger.info('loading channels from api...')
+  logger.info('loading data from api...')
+  const processor = new DataProcessor()
   const dataStorage = new Storage(DATA_DIR)
-  const channelsData = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsData).map(data => new Channel(data))
-  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
-  const feedsData = await dataStorage.json('feeds.json')
-  const feeds = new Collection(feedsData).map(data =>
-    new Feed(data).withChannel(channelsGroupedById)
-  )
-  const feedsGroupedByChannelId = feeds.groupBy(feed => feed.channel)
+  const loader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await loader.load()
+  const { channelsKeyById, feedsGroupedByChannelId }: DataProcessorData = processor.process(data)
 
   logger.info('loading streams...')
   const rootStorage = new Storage(ROOT_DIR)
   const parser = new PlaylistParser({
     storage: rootStorage,
-    channelsGroupedById,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
   const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
@@ -1,38 +1,33 @@
+import { DataLoader, DataProcessor, IssueLoader, PlaylistParser } from '../../core'
 import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
+import type { DataProcessorData } from '../../types/dataProcessor'
+import { Stream, Playlist, Channel, Issue } from '../../models'
+import type { DataLoaderData } from '../../types/dataLoader'
 import { DATA_DIR, STREAMS_DIR } from '../../constants'
-import { IssueLoader, PlaylistParser } from '../../core'
-import { Stream, Playlist, Channel, Feed, Issue } from '../../models'
 import validUrl from 'valid-url'
-import { uniqueId } from 'lodash'
 
 let processedIssues = new Collection()
 
 async function main() {
   const logger = new Logger({ disabled: true })
-  const loader = new IssueLoader()
+  const issueLoader = new IssueLoader()
 
   logger.info('loading issues...')
-  const issues = await loader.load()
+  const issues = await issueLoader.load()
 
-  logger.info('loading channels from api...')
+  logger.info('loading data from api...')
+  const processor = new DataProcessor()
   const dataStorage = new Storage(DATA_DIR)
-  const channelsData = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsData).map(data => new Channel(data))
-  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
-  const feedsData = await dataStorage.json('feeds.json')
-  const feeds = new Collection(feedsData).map(data =>
-    new Feed(data).withChannel(channelsGroupedById)
-  )
-  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
-    feed.channel ? feed.channel.id : uniqueId()
-  )
+  const dataLoader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await dataLoader.load()
+  const { channelsKeyById, feedsGroupedByChannelId }: DataProcessorData = processor.process(data)
 
   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
   const parser = new PlaylistParser({
     storage: streamsStorage,
     feedsGroupedByChannelId,
-    channelsGroupedById
+    channelsKeyById
   })
   const files = await streamsStorage.list('**/*.m3u')
   const streams = await parser.parse(files)
@@ -44,7 +39,7 @@ async function main() {
   await editStreams({
     streams,
     issues,
-    channelsGroupedById,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
 
@@ -52,7 +47,7 @@ async function main() {
   await addStreams({
     streams,
     issues,
-    channelsGroupedById,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
 
@@ -101,12 +96,12 @@ async function removeStreams({ streams, issues }: { streams: Collection; issues:
 async function editStreams({
   streams,
   issues,
-  channelsGroupedById,
+  channelsKeyById,
   feedsGroupedByChannelId
 }: {
   streams: Collection
   issues: Collection
-  channelsGroupedById: Dictionary
+  channelsKeyById: Dictionary
   feedsGroupedByChannelId: Dictionary
 }) {
   const requests = issues.filter(
@@ -129,7 +124,7 @@ async function editStreams({
     stream
       .setChannelId(channelId)
       .setFeedId(feedId)
-      .withChannel(channelsGroupedById)
+      .withChannel(channelsKeyById)
       .withFeed(feedsGroupedByChannelId)
       .updateId()
       .updateName()
@@ -143,8 +138,8 @@ async function editStreams({
 
     if (data.has('label')) stream.setLabel(label)
     if (data.has('quality')) stream.setQuality(quality)
-    if (data.has('httpUserAgent')) stream.setHttpUserAgent(httpUserAgent)
-    if (data.has('httpReferrer')) stream.setHttpReferrer(httpReferrer)
+    if (data.has('httpUserAgent')) stream.setUserAgent(httpUserAgent)
+    if (data.has('httpReferrer')) stream.setReferrer(httpReferrer)
 
     processedIssues.add(issue.number)
   })
@@ -153,12 +148,12 @@ async function editStreams({
 async function addStreams({
   streams,
   issues,
-  channelsGroupedById,
+  channelsKeyById,
   feedsGroupedByChannelId
 }: {
   streams: Collection
   issues: Collection
-  channelsGroupedById: Dictionary
+  channelsKeyById: Dictionary
   feedsGroupedByChannelId: Dictionary
 }) {
   const requests = issues.filter(
@@ -168,51 +163,32 @@ async function addStreams({
     const data = issue.data
     if (data.missing('streamId') || data.missing('streamUrl')) return
     if (streams.includes((_stream: Stream) => _stream.url === data.getString('streamUrl'))) return
-    const stringUrl = data.getString('streamUrl') || ''
-    if (!isUri(stringUrl)) return
+    const streamUrl = data.getString('streamUrl') || ''
+    if (!isUri(streamUrl)) return
 
     const streamId = data.getString('streamId') || ''
-    const [channelId] = streamId.split('@')
+    const [channelId, feedId] = streamId.split('@')
 
-    const channel: Channel = channelsGroupedById.get(channelId)
+    const channel: Channel = channelsKeyById.get(channelId)
     if (!channel) return
 
-    const label = data.getString('label') || ''
-    const quality = data.getString('quality') || ''
-    const httpUserAgent = data.getString('httpUserAgent') || ''
-    const httpReferrer = data.getString('httpReferrer') || ''
+    const label = data.getString('label') || null
+    const quality = data.getString('quality') || null
+    const httpUserAgent = data.getString('httpUserAgent') || null
+    const httpReferrer = data.getString('httpReferrer') || null
 
     const stream = new Stream({
-      tvg: {
-        id: streamId,
-        name: '',
-        url: '',
-        logo: '',
-        rec: '',
-        shift: ''
-      },
+      channel: channelId,
+      feed: feedId,
       name: data.getString('channelName') || channel.name,
-      url: stringUrl,
-      group: {
-        title: ''
-      },
-      http: {
-        'user-agent': httpUserAgent,
-        referrer: httpReferrer
-      },
-      line: -1,
-      raw: '',
-      timeshift: '',
-      catchup: {
-        type: '',
-        source: '',
-        days: ''
-      }
+      url: streamUrl,
+      user_agent: httpUserAgent,
+      referrer: httpReferrer,
+      quality,
+      label
    })
-      .withChannel(channelsGroupedById)
+      .withChannel(channelsKeyById)
      .withFeed(feedsGroupedByChannelId)
-      .setLabel(label)
-      .setQuality(quality)
      .updateName()
      .updateFilepath()
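Note the new destructuring reads both halves of the composite stream ID, which follows the channelId@feedId convention that edit.ts joins back together with join('@'). How the split behaves, including the no-feed case (example IDs are hypothetical):

// Composite IDs round-trip through '@':
const [channelId, feedId] = 'France3.fr@SD'.split('@')
console.log(channelId, feedId) // "France3.fr" "SD"

// With no '@' present, feedId is simply undefined and the stream keeps
// a bare channel ID:
const [bare, missing] = 'France3.fr'.split('@')
console.log(bare, missing) // "France3.fr" undefined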
@@ -1,10 +1,11 @@
 import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
-import { PlaylistParser } from '../../core'
-import { Channel, Stream, Blocked, Feed } from '../../models'
+import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
+import { DataProcessorData } from '../../types/dataProcessor'
+import { DATA_DIR, STREAMS_DIR } from '../../constants'
+import { DataLoaderData } from '../../types/dataLoader'
+import { BlocklistRecord, Stream } from '../../models'
 import { program } from 'commander'
 import chalk from 'chalk'
-import { uniqueId } from 'lodash'
-import { DATA_DIR, STREAMS_DIR } from '../../constants'
 
 program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
 
@@ -18,26 +19,21 @@ async function main() {
   const logger = new Logger()
 
   logger.info('loading data from api...')
+  const processor = new DataProcessor()
   const dataStorage = new Storage(DATA_DIR)
-  const channelsData = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsData).map(data => new Channel(data))
-  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
-  const feedsData = await dataStorage.json('feeds.json')
-  const feeds = new Collection(feedsData).map(data =>
-    new Feed(data).withChannel(channelsGroupedById)
-  )
-  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
-    feed.channel ? feed.channel.id : uniqueId()
-  )
-  const blocklistContent = await dataStorage.json('blocklist.json')
-  const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))
-  const blocklistGroupedByChannelId = blocklist.keyBy((blocked: Blocked) => blocked.channelId)
+  const loader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await loader.load()
+  const {
+    channelsKeyById,
+    feedsGroupedByChannelId,
+    blocklistRecordsGroupedByChannelId
+  }: DataProcessorData = processor.process(data)
 
   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
   const parser = new PlaylistParser({
     storage: streamsStorage,
-    channelsGroupedById,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
   const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
@@ -55,11 +51,11 @@ async function main() {
   const buffer = new Dictionary()
   streams.forEach((stream: Stream) => {
     if (stream.channelId) {
-      const channel = channelsGroupedById.get(stream.channelId)
+      const channel = channelsKeyById.get(stream.channelId)
       if (!channel) {
         log.add({
           type: 'warning',
-          line: stream.line,
+          line: stream.getLine(),
           message: `"${stream.id}" is not in the database`
         })
       }
@@ -69,29 +65,32 @@ async function main() {
     if (duplicate) {
       log.add({
         type: 'warning',
-        line: stream.line,
+        line: stream.getLine(),
         message: `"${stream.url}" is already on the playlist`
       })
     } else {
       buffer.set(stream.url, true)
     }
 
-    const blocked = stream.channel ? blocklistGroupedByChannelId.get(stream.channel.id) : false
-    if (blocked) {
-      if (blocked.reason === 'dmca') {
+    const blocklistRecords = stream.channel
+      ? new Collection(blocklistRecordsGroupedByChannelId.get(stream.channel.id))
+      : new Collection()
+
+    blocklistRecords.forEach((blocklistRecord: BlocklistRecord) => {
+      if (blocklistRecord.reason === 'dmca') {
         log.add({
           type: 'error',
-          line: stream.line,
-          message: `"${blocked.channelId}" is on the blocklist due to claims of copyright holders (${blocked.ref})`
+          line: stream.getLine(),
+          message: `"${blocklistRecord.channelId}" is on the blocklist due to claims of copyright holders (${blocklistRecord.ref})`
         })
-      } else if (blocked.reason === 'nsfw') {
+      } else if (blocklistRecord.reason === 'nsfw') {
        log.add({
          type: 'error',
-          line: stream.line,
-          message: `"${blocked.channelId}" is on the blocklist due to NSFW content (${blocked.ref})`
+          line: stream.getLine(),
+          message: `"${blocklistRecord.channelId}" is on the blocklist due to NSFW content (${blocklistRecord.ref})`
        })
      }
-    }
+    })
   })
 
   if (log.notEmpty()) {
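The blocklist check above also changes shape: a single Blocked lookup becomes a Collection of BlocklistRecord objects, so one channel can carry several records (say, both dmca and nsfw) and each produces its own error line. A standalone sketch of that one-to-many grouping, with made-up record data:

// One channel can now map to several blocklist records; each reason is
// reported separately. The data below is illustrative only.
type BlocklistRecord = { channelId: string; reason: 'dmca' | 'nsfw'; ref: string }

const records: BlocklistRecord[] = [
  { channelId: 'Example.us', reason: 'dmca', ref: 'https://github.com/iptv-org/iptv/issues/1' },
  { channelId: 'Example.us', reason: 'nsfw', ref: 'https://github.com/iptv-org/iptv/issues/2' }
]

const byChannelId = new Map<string, BlocklistRecord[]>()
for (const record of records) {
  const group = byChannelId.get(record.channelId) ?? []
  group.push(record)
  byChannelId.set(record.channelId, group)
}

for (const record of byChannelId.get('Example.us') ?? []) {
  console.log(`error: "${record.channelId}" blocked (${record.reason}, ${record.ref})`)
}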
@@ -1,44 +1,41 @@
+import { DataLoader, DataProcessor, IssueLoader, PlaylistParser } from '../../core'
 import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
+import { DataProcessorData } from '../../types/dataProcessor'
 import { DATA_DIR, STREAMS_DIR } from '../../constants'
-import { IssueLoader, PlaylistParser } from '../../core'
-import { Blocked, Channel, Issue, Stream, Feed } from '../../models'
-import { uniqueId } from 'lodash'
+import { DataLoaderData } from '../../types/dataLoader'
+import { Issue, Stream } from '../../models'
 
 async function main() {
   const logger = new Logger()
-  const loader = new IssueLoader()
+  const issueLoader = new IssueLoader()
   let report = new Collection()
 
   logger.info('loading issues...')
-  const issues = await loader.load()
+  const issues = await issueLoader.load()
 
   logger.info('loading data from api...')
+  const processor = new DataProcessor()
   const dataStorage = new Storage(DATA_DIR)
-  const channelsData = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsData).map(data => new Channel(data))
-  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
-  const feedsData = await dataStorage.json('feeds.json')
-  const feeds = new Collection(feedsData).map(data =>
-    new Feed(data).withChannel(channelsGroupedById)
-  )
-  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
-    feed.channel ? feed.channel.id : uniqueId()
-  )
-  const blocklistContent = await dataStorage.json('blocklist.json')
-  const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))
-  const blocklistGroupedByChannelId = blocklist.keyBy((blocked: Blocked) => blocked.channelId)
+  const dataLoader = new DataLoader({ storage: dataStorage })
+  const data: DataLoaderData = await dataLoader.load()
+  const {
+    channelsKeyById,
+    feedsGroupedByChannelId,
+    blocklistRecordsGroupedByChannelId
+  }: DataProcessorData = processor.process(data)
 
   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
   const parser = new PlaylistParser({
     storage: streamsStorage,
-    channelsGroupedById,
+    channelsKeyById,
     feedsGroupedByChannelId
   })
   const files = await streamsStorage.list('**/*.m3u')
   const streams = await parser.parse(files)
   const streamsGroupedByUrl = streams.groupBy((stream: Stream) => stream.url)
-  const streamsGroupedByChannelId = streams.groupBy((stream: Stream) => stream.channelId)
+  const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())
 
   logger.info('checking broken streams reports...')
   const brokenStreamReports = issues.filter(issue =>
@@ -94,8 +91,8 @@ async function main() {
 
     if (!channelId) result.status = 'missing_id'
     else if (!streamUrl) result.status = 'missing_link'
-    else if (blocklistGroupedByChannelId.has(channelId)) result.status = 'blocked'
-    else if (channelsGroupedById.missing(channelId)) result.status = 'wrong_id'
+    else if (blocklistRecordsGroupedByChannelId.has(channelId)) result.status = 'blocked'
+    else if (channelsKeyById.missing(channelId)) result.status = 'wrong_id'
     else if (streamsGroupedByUrl.has(streamUrl)) result.status = 'on_playlist'
     else if (addRequestsBuffer.has(streamUrl)) result.status = 'duplicate'
     else result.status = 'pending'
@@ -124,7 +121,7 @@ async function main() {
 
     if (!streamUrl) result.status = 'missing_link'
     else if (streamsGroupedByUrl.missing(streamUrl)) result.status = 'invalid_link'
-    else if (channelId && channelsGroupedById.missing(channelId)) result.status = 'invalid_id'
+    else if (channelId && channelsKeyById.missing(channelId)) result.status = 'invalid_id'
 
     report.add(result)
   })
@@ -147,16 +144,16 @@ async function main() {
     }
 
     if (!channelId) result.status = 'missing_id'
-    else if (channelsGroupedById.missing(channelId)) result.status = 'invalid_id'
-    else if (channelSearchRequestsBuffer.has(channelId)) result.status = 'duplicate'
-    else if (blocklistGroupedByChannelId.has(channelId)) result.status = 'blocked'
-    else if (streamsGroupedByChannelId.has(channelId)) result.status = 'fulfilled'
+    else if (channelsKeyById.missing(channelId)) result.status = 'invalid_id'
+    else if (channelSearchRequestsBuffer.has(streamId)) result.status = 'duplicate'
+    else if (blocklistRecordsGroupedByChannelId.has(channelId)) result.status = 'blocked'
+    else if (streamsGroupedById.has(streamId)) result.status = 'fulfilled'
     else {
-      const channelData = channelsGroupedById.get(channelId)
+      const channelData = channelsKeyById.get(channelId)
       if (channelData.length && channelData[0].closed) result.status = 'closed'
     }
 
-    channelSearchRequestsBuffer.set(channelId, true)
+    channelSearchRequestsBuffer.set(streamId, true)
 
     report.add(result)
   })