mirror of https://github.com/iptv-org/iptv.git
synced 2025-05-11 17:40:03 -04:00

Update scripts

commit 02ec7e6f76 (parent 74b3cff1d2)
42 changed files with 1317 additions and 694 deletions
@@ -1,21 +1,37 @@
-import { Logger, Storage } from '@freearhey/core'
-import { API_DIR, STREAMS_DIR } from '../../constants'
+import { Logger, Storage, Collection } from '@freearhey/core'
+import { API_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
 import { PlaylistParser } from '../../core'
-import { Stream } from '../../models'
+import { Stream, Channel, Feed } from '../../models'
+import { uniqueId } from 'lodash'
 
 async function main() {
   const logger = new Logger()
 
+  logger.info('loading api data...')
+  const dataStorage = new Storage(DATA_DIR)
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data).withChannel(channelsGroupedById)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
+    feed.channel ? feed.channel.id : uniqueId()
+  )
+
   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
-  const parser = new PlaylistParser({ storage: streamsStorage })
+  const parser = new PlaylistParser({
+    storage: streamsStorage,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
   const files = await streamsStorage.list('**/*.m3u')
   let streams = await parser.parse(files)
   streams = streams
     .map(data => new Stream(data))
-    .orderBy([(stream: Stream) => stream.channel])
+    .orderBy((stream: Stream) => stream.getId())
     .map((stream: Stream) => stream.toJSON())
 
   logger.info(`found ${streams.count()} streams`)
 
   logger.info('saving to .api/streams.json...')
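Note: every script touched by this commit repeats the same preparation step before parsing playlists — key the channels by id, group the feeds by their channel id, and hand both tables to PlaylistParser. A minimal sketch of that lookup-table pattern, with plain Maps standing in for @freearhey/core's Collection methods (the trimmed ChannelLike/FeedLike shapes and the orphan-key fallback below are illustrative assumptions, not the library's API):

    // Sketch only: plain-Map stand-ins for Collection.keyBy() / Collection.groupBy().
    interface ChannelLike { id: string }
    interface FeedLike { id: string; channel?: ChannelLike }

    function keyById(channels: ChannelLike[]): Map<string, ChannelLike> {
      const byId = new Map<string, ChannelLike>()
      for (const channel of channels) byId.set(channel.id, channel)
      return byId
    }

    function groupByChannelId(feeds: FeedLike[]): Map<string, FeedLike[]> {
      const byChannelId = new Map<string, FeedLike[]>()
      let orphan = 0
      for (const feed of feeds) {
        // Feeds without a resolved channel get a unique bucket, mirroring the
        // `feed.channel ? feed.channel.id : uniqueId()` fallback in the diff.
        const key = feed.channel ? feed.channel.id : `orphan-${orphan++}`
        const group = byChannelId.get(key) ?? []
        group.push(feed)
        byChannelId.set(key, group)
      }
      return byChannelId
    }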
@@ -12,7 +12,9 @@ async function main() {
     client.download('countries.json'),
     client.download('languages.json'),
     client.download('regions.json'),
-    client.download('subdivisions.json')
+    client.download('subdivisions.json'),
+    client.download('feeds.json'),
+    client.download('timezones.json')
   ]
 
   await Promise.all(requests)
@@ -1,25 +1,36 @@
 import { Logger, Storage, Collection } from '@freearhey/core'
 import { STREAMS_DIR, DATA_DIR } from '../../constants'
 import { PlaylistParser } from '../../core'
-import { Stream, Playlist, Channel } from '../../models'
+import { Stream, Playlist, Channel, Feed } from '../../models'
 import { program } from 'commander'
+import { uniqueId } from 'lodash'
 
 program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
 
 async function main() {
-  const storage = new Storage(STREAMS_DIR)
+  const streamsStorage = new Storage(STREAMS_DIR)
   const logger = new Logger()
 
-  logger.info('loading channels from api...')
+  logger.info('loading data from api...')
   const dataStorage = new Storage(DATA_DIR)
-  const channelsContent = await dataStorage.json('channels.json')
-  const groupedChannels = new Collection(channelsContent)
-    .map(data => new Channel(data))
-    .keyBy((channel: Channel) => channel.id)
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data).withChannel(channelsGroupedById)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy(feed =>
+    feed.channel ? feed.channel.id : uniqueId()
+  )
 
   logger.info('loading streams...')
-  const parser = new PlaylistParser({ storage })
-  const files = program.args.length ? program.args : await storage.list('**/*.m3u')
+  const parser = new PlaylistParser({
+    storage: streamsStorage,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
+  const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
   let streams = await parser.parse(files)
 
   logger.info(`found ${streams.count()} streams`)
@@ -35,8 +46,8 @@ async function main() {
 
   logger.info('removing wrong id...')
   streams = streams.map((stream: Stream) => {
-    if (groupedChannels.missing(stream.channel)) {
-      stream.channel = ''
+    if (!stream.channel || channelsGroupedById.missing(stream.channel.id)) {
+      stream.id = ''
     }
 
     return stream
@@ -46,22 +57,22 @@ async function main() {
   streams = streams.orderBy(
     [
       (stream: Stream) => stream.name,
-      (stream: Stream) => parseInt(stream.quality.replace('p', '')),
-      (stream: Stream) => stream.label,
+      (stream: Stream) => stream.getHorizontalResolution(),
+      (stream: Stream) => stream.getLabel(),
       (stream: Stream) => stream.url
     ],
     ['asc', 'desc', 'asc', 'asc']
   )
 
   logger.info('saving...')
-  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
+  const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
   for (let filepath of groupedStreams.keys()) {
     const streams = groupedStreams.get(filepath) || []
 
     if (!streams.length) return
 
     const playlist = new Playlist(streams, { public: false })
-    await storage.save(filepath, playlist.toString())
+    await streamsStorage.save(filepath, playlist.toString())
   }
 }
 
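Note: the orderBy calls in this file pair an array of key functions with a parallel array of directions, lodash-style, so the 'desc' entry applies to the key function at the same index. A short sketch of that contract using lodash's orderBy directly (the row objects are hypothetical stand-ins for Stream):

    import { orderBy } from 'lodash'

    // Sketch only: direction entries line up with the key functions by index.
    const rows = [
      { name: 'A', horizontalResolution: 720, label: '' },
      { name: 'A', horizontalResolution: 1080, label: '' }
    ]
    const sorted = orderBy(
      rows,
      [row => row.name, row => row.horizontalResolution, row => row.label],
      ['asc', 'desc', 'asc']
    )
    // Ties on name break by resolution, descending, so the 1080 row sorts first.
    console.log(sorted[0].horizontalResolution) // 1080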
@@ -1,14 +1,23 @@
-import { Logger, Storage, Collection, File } from '@freearhey/core'
+import { Logger, Storage, Collection } from '@freearhey/core'
 import { PlaylistParser } from '../../core'
-import { Stream, Category, Channel, Language, Country, Region, Subdivision } from '../../models'
-import _ from 'lodash'
+import {
+  Stream,
+  Category,
+  Channel,
+  Language,
+  Country,
+  Region,
+  Subdivision,
+  Feed,
+  Timezone
+} from '../../models'
+import { uniqueId } from 'lodash'
 import {
   CategoriesGenerator,
   CountriesGenerator,
   LanguagesGenerator,
   RegionsGenerator,
   IndexGenerator,
   IndexNsfwGenerator,
   IndexCategoryGenerator,
   IndexCountryGenerator,
   IndexLanguageGenerator,
@@ -19,123 +28,134 @@ import { DATA_DIR, LOGS_DIR, STREAMS_DIR } from '../../constants'
 async function main() {
   const logger = new Logger()
   const dataStorage = new Storage(DATA_DIR)
 
-  logger.info('loading data from api...')
-  const channelsContent = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsContent).map(data => new Channel(data))
-  const categoriesContent = await dataStorage.json('categories.json')
-  const categories = new Collection(categoriesContent).map(data => new Category(data))
-  const countriesContent = await dataStorage.json('countries.json')
-  const countries = new Collection(countriesContent).map(data => new Country(data))
-  const languagesContent = await dataStorage.json('languages.json')
-  const languages = new Collection(languagesContent).map(data => new Language(data))
-  const regionsContent = await dataStorage.json('regions.json')
-  const regions = new Collection(regionsContent).map(data => new Region(data))
-  const subdivisionsContent = await dataStorage.json('subdivisions.json')
-  const subdivisions = new Collection(subdivisionsContent).map(data => new Subdivision(data))
-
-  logger.info('loading streams...')
-  let streams = await loadStreams({ channels, categories, languages })
-  let totalStreams = streams.count()
-  streams = streams.uniqBy((stream: Stream) => (stream.channel || _.uniqueId()) + stream.timeshift)
-  logger.info(`found ${totalStreams} streams (including ${streams.count()} unique)`)
-
   const generatorsLogger = new Logger({
     stream: await new Storage(LOGS_DIR).createStream(`generators.log`)
   })
 
+  logger.info('loading data from api...')
+  const categoriesData = await dataStorage.json('categories.json')
+  const countriesData = await dataStorage.json('countries.json')
+  const languagesData = await dataStorage.json('languages.json')
+  const regionsData = await dataStorage.json('regions.json')
+  const subdivisionsData = await dataStorage.json('subdivisions.json')
+  const timezonesData = await dataStorage.json('timezones.json')
+  const channelsData = await dataStorage.json('channels.json')
+  const feedsData = await dataStorage.json('feeds.json')
+
+  logger.info('preparing data...')
+  const subdivisions = new Collection(subdivisionsData).map(data => new Subdivision(data))
+  const subdivisionsGroupedByCode = subdivisions.keyBy(
+    (subdivision: Subdivision) => subdivision.code
+  )
+  const subdivisionsGroupedByCountryCode = subdivisions.groupBy(
+    (subdivision: Subdivision) => subdivision.countryCode
+  )
+  let regions = new Collection(regionsData).map(data =>
+    new Region(data).withSubdivisions(subdivisions)
+  )
+  const regionsGroupedByCode = regions.keyBy((region: Region) => region.code)
+  const categories = new Collection(categoriesData).map(data => new Category(data))
+  const categoriesGroupedById = categories.keyBy((category: Category) => category.id)
+  const languages = new Collection(languagesData).map(data => new Language(data))
+  const languagesGroupedByCode = languages.keyBy((language: Language) => language.code)
+  const countries = new Collection(countriesData).map(data =>
+    new Country(data)
+      .withRegions(regions)
+      .withLanguage(languagesGroupedByCode)
+      .withSubdivisions(subdivisionsGroupedByCountryCode)
+  )
+  const countriesGroupedByCode = countries.keyBy((country: Country) => country.code)
+  regions = regions.map((region: Region) => region.withCountries(countriesGroupedByCode))
+
+  const timezones = new Collection(timezonesData).map(data =>
+    new Timezone(data).withCountries(countriesGroupedByCode)
+  )
+  const timezonesGroupedById = timezones.keyBy((timezone: Timezone) => timezone.id)
+
+  const channels = new Collection(channelsData).map(data =>
+    new Channel(data)
+      .withCategories(categoriesGroupedById)
+      .withCountry(countriesGroupedByCode)
+      .withSubdivision(subdivisionsGroupedByCode)
+  )
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data)
+      .withChannel(channelsGroupedById)
+      .withLanguages(languagesGroupedByCode)
+      .withTimezones(timezonesGroupedById)
+      .withBroadcastCountries(
+        countriesGroupedByCode,
+        regionsGroupedByCode,
+        subdivisionsGroupedByCode
+      )
+      .withBroadcastRegions(regions, regionsGroupedByCode)
+      .withBroadcastSubdivisions(subdivisionsGroupedByCode)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
+    feed.channel ? feed.channel.id : uniqueId()
+  )
+
+  logger.info('loading streams...')
+  const storage = new Storage(STREAMS_DIR)
+  const parser = new PlaylistParser({
+    storage,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
+  const files = await storage.list('**/*.m3u')
+  let streams = await parser.parse(files)
+  const totalStreams = streams.count()
+  streams = streams.uniqBy((stream: Stream) => stream.getId() || uniqueId())
+  logger.info(`found ${totalStreams} streams (including ${streams.count()} unique)`)
+
+  logger.info('sorting streams...')
+  streams = streams.orderBy(
+    [
+      (stream: Stream) => stream.getId(),
+      (stream: Stream) => stream.getHorizontalResolution(),
+      (stream: Stream) => stream.getLabel()
+    ],
+    ['asc', 'asc', 'desc']
+  )
+
   logger.info('generating categories/...')
   await new CategoriesGenerator({ categories, streams, logger: generatorsLogger }).generate()
 
   logger.info('generating countries/...')
   await new CountriesGenerator({
     countries,
     streams,
     regions,
     subdivisions,
     logger: generatorsLogger
   }).generate()
 
   logger.info('generating languages/...')
   await new LanguagesGenerator({ streams, logger: generatorsLogger }).generate()
 
   logger.info('generating regions/...')
   await new RegionsGenerator({
     streams,
     regions,
     subdivisions,
     logger: generatorsLogger
   }).generate()
 
   logger.info('generating index.m3u...')
   await new IndexGenerator({ streams, logger: generatorsLogger }).generate()
 
   logger.info('generating index.category.m3u...')
   await new IndexCategoryGenerator({ streams, logger: generatorsLogger }).generate()
 
   logger.info('generating index.country.m3u...')
   await new IndexCountryGenerator({
     streams,
     countries,
     regions,
     subdivisions,
     logger: generatorsLogger
   }).generate()
 
   logger.info('generating index.language.m3u...')
   await new IndexLanguageGenerator({ streams, logger: generatorsLogger }).generate()
 
   logger.info('generating index.region.m3u...')
   await new IndexRegionGenerator({ streams, regions, logger: generatorsLogger }).generate()
 }
 
 main()
-
-async function loadStreams({
-  channels,
-  categories,
-  languages
-}: {
-  channels: Collection
-  categories: Collection
-  languages: Collection
-}) {
-  const groupedChannels = channels.keyBy(channel => channel.id)
-  const groupedCategories = categories.keyBy(category => category.id)
-  const groupedLanguages = languages.keyBy(language => language.code)
-
-  const storage = new Storage(STREAMS_DIR)
-  const parser = new PlaylistParser({ storage })
-  const files = await storage.list('**/*.m3u')
-  let streams = await parser.parse(files)
-
-  streams = streams
-    .orderBy(
-      [
-        (stream: Stream) => stream.channel,
-        (stream: Stream) => parseInt(stream.quality.replace('p', '')),
-        (stream: Stream) => stream.label
-      ],
-      ['asc', 'asc', 'desc', 'asc']
-    )
-    .map((stream: Stream) => {
-      const channel: Channel | undefined = groupedChannels.get(stream.channel)
-
-      if (channel) {
-        const channelCategories = channel.categories
-          .map((id: string) => groupedCategories.get(id))
-          .filter(Boolean)
-        const channelLanguages = channel.languages
-          .map((id: string) => groupedLanguages.get(id))
-          .filter(Boolean)
-
-        stream.categories = channelCategories
-        stream.languages = channelLanguages
-        stream.broadcastArea = channel.broadcastArea
-        stream.isNSFW = channel.isNSFW
-        if (channel.logo) stream.logo = channel.logo
-      } else {
-        const file = new File(stream.filepath)
-        const [_, countryCode] = file.name().match(/^([a-z]{2})(_|$)/) || [null, null]
-        const defaultBroadcastArea = countryCode ? [`c/${countryCode.toUpperCase()}`] : []
-
-        stream.broadcastArea = new Collection(defaultBroadcastArea)
-      }
-
-      return stream
-    })
-
-  return streams
-}
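Note: the new 'preparing data...' block wires the models in dependency order — subdivisions into regions, regions and languages into countries, countries into timezones and channels, and everything into feeds — and each with*() call is chained, which suggests the setters return the model instance. A trimmed sketch of that builder-style wiring under that assumption (FeedSketch and its fields are hypothetical):

    // Sketch only: chainable with*() setters that resolve raw ids against lookup tables.
    class FeedSketch {
      channel?: { id: string }
      constructor(private data: { channel: string }) {}

      withChannel(channelsGroupedById: Map<string, { id: string }>): this {
        // Resolve the raw channel id carried in the data to a full record.
        this.channel = channelsGroupedById.get(this.data.channel)
        return this
      }
    }

    const channelsGroupedById = new Map([['bbc1.uk', { id: 'bbc1.uk' }]])
    const feed = new FeedSketch({ channel: 'bbc1.uk' }).withChannel(channelsGroupedById)
    console.log(feed.channel?.id) // 'bbc1.uk'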
@@ -1,7 +1,7 @@
 import { Logger, Storage, Collection } from '@freearhey/core'
-import { ROOT_DIR, STREAMS_DIR } from '../../constants'
+import { ROOT_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
 import { PlaylistParser, StreamTester, CliTable } from '../../core'
-import { Stream } from '../../models'
+import { Stream, Feed, Channel } from '../../models'
 import { program } from 'commander'
 import { eachLimit } from 'async-es'
 import commandExists from 'command-exists'
@@ -38,8 +38,6 @@ const logger = new Logger()
 const tester = new StreamTester()
 
 async function main() {
-  const storage = new Storage(ROOT_DIR)
-
   if (await isOffline()) {
     logger.error(chalk.red('Internet connection is required for the script to work'))
 
@@ -56,9 +54,25 @@ async function main() {
     return
   }
 
+  logger.info('loading channels from api...')
+  const dataStorage = new Storage(DATA_DIR)
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data).withChannel(channelsGroupedById)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy(feed => feed.channel)
+
   logger.info('loading streams...')
-  const parser = new PlaylistParser({ storage })
-  const files = program.args.length ? program.args : await storage.list(`${STREAMS_DIR}/*.m3u`)
+  const rootStorage = new Storage(ROOT_DIR)
+  const parser = new PlaylistParser({
+    storage: rootStorage,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
+  const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
   streams = await parser.parse(files)
 
   logger.info(`found ${streams.count()} streams`)
@@ -89,7 +103,7 @@
 main()
 
 async function runTest(stream: Stream) {
-  const key = stream.filepath + stream.channel + stream.url
+  const key = stream.filepath + stream.getId() + stream.url
   results[key] = chalk.white('LOADING...')
 
   const result = await tester.test(stream)
@@ -125,11 +139,11 @@ function drawTable() {
     ]
   })
   streams.forEach((stream: Stream, index: number) => {
-    const status = results[stream.filepath + stream.channel + stream.url] || chalk.gray('PENDING')
+    const status = results[stream.filepath + stream.getId() + stream.url] || chalk.gray('PENDING')
 
     const row = {
       '': index,
-      'tvg-id': stream.channel.length > 25 ? stream.channel.slice(0, 22) + '...' : stream.channel,
+      'tvg-id': stream.getId().length > 25 ? stream.getId().slice(0, 22) + '...' : stream.getId(),
       url: stream.url.length > 100 ? stream.url.slice(0, 97) + '...' : stream.url,
       status
     }
@@ -1,45 +1,63 @@
 import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
 import { DATA_DIR, STREAMS_DIR } from '../../constants'
 import { IssueLoader, PlaylistParser } from '../../core'
-import { Stream, Playlist, Channel, Issue } from '../../models'
+import { Stream, Playlist, Channel, Feed, Issue } from '../../models'
 import validUrl from 'valid-url'
+import { uniqueId } from 'lodash'
 
 let processedIssues = new Collection()
-let streams: Collection
-let groupedChannels: Dictionary
-let issues: Collection
 
 async function main() {
   const logger = new Logger({ disabled: true })
   const loader = new IssueLoader()
 
   logger.info('loading issues...')
-  issues = await loader.load()
+  const issues = await loader.load()
 
   logger.info('loading channels from api...')
   const dataStorage = new Storage(DATA_DIR)
-  const channelsContent = await dataStorage.json('channels.json')
-  groupedChannels = new Collection(channelsContent)
-    .map(data => new Channel(data))
-    .keyBy((channel: Channel) => channel.id)
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data).withChannel(channelsGroupedById)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
+    feed.channel ? feed.channel.id : uniqueId()
+  )
 
   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
-  const parser = new PlaylistParser({ storage: streamsStorage })
+  const parser = new PlaylistParser({
+    storage: streamsStorage,
+    feedsGroupedByChannelId,
+    channelsGroupedById
+  })
   const files = await streamsStorage.list('**/*.m3u')
-  streams = await parser.parse(files)
+  const streams = await parser.parse(files)
 
   logger.info('removing broken streams...')
-  await removeStreams(loader)
+  await removeStreams({ streams, issues })
 
   logger.info('edit stream description...')
-  await editStreams(loader)
+  await editStreams({
+    streams,
+    issues,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
 
   logger.info('add new streams...')
-  await addStreams(loader)
+  await addStreams({
+    streams,
+    issues,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
 
   logger.info('saving...')
-  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
+  const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
   for (let filepath of groupedStreams.keys()) {
     let streams = groupedStreams.get(filepath) || []
     streams = streams.filter((stream: Stream) => stream.removed === false)
@@ -54,7 +72,7 @@ async function main() {
 
 main()
 
-async function removeStreams(loader: IssueLoader) {
+async function removeStreams({ streams, issues }: { streams: Collection; issues: Collection }) {
   const requests = issues.filter(
     issue => issue.labels.includes('streams:remove') && issue.labels.includes('approved')
   )
@@ -62,22 +80,35 @@ async function removeStreams(loader: IssueLoader) {
     const data = issue.data
     if (data.missing('brokenLinks')) return
 
-    const brokenLinks = data.getString('brokenLinks').split(/\r?\n/).filter(Boolean)
+    const brokenLinks = data.getString('brokenLinks') || ''
 
     let changed = false
-    brokenLinks.forEach(link => {
-      const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
-      if (found) {
-        found.removed = true
-        changed = true
-      }
-    })
+    brokenLinks
+      .split(/\r?\n/)
+      .filter(Boolean)
+      .forEach(link => {
+        const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
+        if (found) {
+          found.removed = true
+          changed = true
+        }
+      })
 
     if (changed) processedIssues.add(issue.number)
   })
 }
 
-async function editStreams(loader: IssueLoader) {
+async function editStreams({
+  streams,
+  issues,
+  channelsGroupedById,
+  feedsGroupedByChannelId
+}: {
+  streams: Collection
+  issues: Collection
+  channelsGroupedById: Dictionary
+  feedsGroupedByChannelId: Dictionary
+}) {
   const requests = issues.filter(
     issue => issue.labels.includes('streams:edit') && issue.labels.includes('approved')
   )
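Note: removeStreams, editStreams, and addStreams drop their positional IssueLoader parameter for a single destructured options object, so each helper now receives the shared collections explicitly instead of reading the module-level streams/issues/groupedChannels variables this diff deletes. The signature style, sketched with plain types in place of Collection/Dictionary (names below are illustrative):

    // Sketch only: destructured, explicitly-typed options instead of module-level state.
    type SharedTables = {
      streams: unknown[]
      issues: unknown[]
      channelsGroupedById: Map<string, unknown>
      feedsGroupedByChannelId: Map<string, unknown[]>
    }

    async function removeStreamsSketch({ streams, issues }: Pick<SharedTables, 'streams' | 'issues'>) {
      // Everything the helper touches arrives through its parameter list.
      console.log(streams.length, issues.length)
    }

    removeStreamsSketch({ streams: [], issues: [] })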
@@ -86,59 +117,110 @@ async function editStreams(loader: IssueLoader) {
 
     if (data.missing('streamUrl')) return
 
-    let stream = streams.first(
+    let stream: Stream = streams.first(
       (_stream: Stream) => _stream.url === data.getString('streamUrl')
-    ) as Stream
-
+    )
     if (!stream) return
 
-    if (data.has('channelId')) {
-      const channel = groupedChannels.get(data.getString('channelId'))
+    const streamId = data.getString('streamId') || ''
+    const [channelId, feedId] = streamId.split('@')
 
-      if (!channel) return
-
-      stream.channel = data.getString('channelId')
-      stream.filepath = `${channel.country.toLowerCase()}.m3u`
-      stream.line = -1
-      stream.name = channel.name
+    if (channelId) {
+      stream
+        .setChannelId(channelId)
+        .setFeedId(feedId)
+        .withChannel(channelsGroupedById)
+        .withFeed(feedsGroupedByChannelId)
+        .updateId()
+        .updateName()
+        .updateFilepath()
     }
 
-    if (data.has('label')) stream.label = data.getString('label')
-    if (data.has('quality')) stream.quality = data.getString('quality')
-    if (data.has('httpUserAgent')) stream.httpUserAgent = data.getString('httpUserAgent')
-    if (data.has('httpReferrer')) stream.httpReferrer = data.getString('httpReferrer')
+    const label = data.getString('label') || ''
+    const quality = data.getString('quality') || ''
+    const httpUserAgent = data.getString('httpUserAgent') || ''
+    const httpReferrer = data.getString('httpReferrer') || ''
+
+    if (data.has('label')) stream.setLabel(label)
+    if (data.has('quality')) stream.setQuality(quality)
+    if (data.has('httpUserAgent')) stream.setHttpUserAgent(httpUserAgent)
+    if (data.has('httpReferrer')) stream.setHttpReferrer(httpReferrer)
 
     processedIssues.add(issue.number)
   })
 }
 
-async function addStreams(loader: IssueLoader) {
+async function addStreams({
+  streams,
+  issues,
+  channelsGroupedById,
+  feedsGroupedByChannelId
+}: {
+  streams: Collection
+  issues: Collection
+  channelsGroupedById: Dictionary
+  feedsGroupedByChannelId: Dictionary
+}) {
   const requests = issues.filter(
     issue => issue.labels.includes('streams:add') && issue.labels.includes('approved')
   )
   requests.forEach((issue: Issue) => {
     const data = issue.data
-    if (data.missing('channelId') || data.missing('streamUrl')) return
+    if (data.missing('streamId') || data.missing('streamUrl')) return
     if (streams.includes((_stream: Stream) => _stream.url === data.getString('streamUrl'))) return
-    if (!validUrl.isUri(data.getString('streamUrl'))) return
+    const stringUrl = data.getString('streamUrl') || ''
+    if (!isUri(stringUrl)) return
 
-    const channel = groupedChannels.get(data.getString('channelId'))
+    const streamId = data.getString('streamId') || ''
+    const [channelId] = streamId.split('@')
+
+    const channel: Channel = channelsGroupedById.get(channelId)
     if (!channel) return
 
+    const label = data.getString('label') || ''
+    const quality = data.getString('quality') || ''
+    const httpUserAgent = data.getString('httpUserAgent') || ''
+    const httpReferrer = data.getString('httpReferrer') || ''
+
     const stream = new Stream({
-      channel: data.getString('channelId'),
-      url: data.getString('streamUrl'),
-      label: data.getString('label'),
-      quality: data.getString('quality'),
-      httpUserAgent: data.getString('httpUserAgent'),
-      httpReferrer: data.getString('httpReferrer'),
-      filepath: `${channel.country.toLowerCase()}.m3u`,
+      tvg: {
+        id: streamId,
+        name: '',
+        url: '',
+        logo: '',
+        rec: '',
+        shift: ''
+      },
+      name: data.getString('channelName') || channel.name,
+      url: stringUrl,
+      group: {
+        title: ''
+      },
+      http: {
+        'user-agent': httpUserAgent,
+        referrer: httpReferrer
+      },
       line: -1,
-      name: data.getString('channelName') || channel.name
+      raw: '',
+      timeshift: '',
+      catchup: {
+        type: '',
+        source: '',
+        days: ''
+      }
     })
+      .withChannel(channelsGroupedById)
+      .withFeed(feedsGroupedByChannelId)
+      .setLabel(label)
+      .setQuality(quality)
+      .updateName()
+      .updateFilepath()
 
     streams.add(stream)
     processedIssues.add(issue.number)
   })
 }
+
+function isUri(string: string) {
+  return validUrl.isUri(encodeURI(string))
+}
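Note: issue forms now carry a streamId of the form <channelId>@<feedId> instead of a bare channelId; String.prototype.split('@') always yields the channel part at index 0 and leaves the feed part undefined when no '@' is present. A tiny sketch of the parsing both editStreams and addStreams rely on (the ids below are made up):

    // Sketch only: `<channelId>@<feedId>` parsing as used by the handlers above.
    function parseStreamId(streamId: string): { channelId: string; feedId?: string } {
      const [channelId, feedId] = streamId.split('@')
      return { channelId, feedId }
    }

    console.log(parseStreamId('bbc1.uk@sd')) // { channelId: 'bbc1.uk', feedId: 'sd' }
    console.log(parseStreamId('bbc1.uk'))    // { channelId: 'bbc1.uk', feedId: undefined }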
@@ -1,9 +1,9 @@
 import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
 import { PlaylistParser } from '../../core'
-import { Channel, Stream, Blocked } from '../../models'
+import { Channel, Stream, Blocked, Feed } from '../../models'
 import { program } from 'commander'
 import chalk from 'chalk'
-import _ from 'lodash'
+import { uniqueId } from 'lodash'
 import { DATA_DIR, STREAMS_DIR } from '../../constants'
 
 program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
@@ -17,41 +17,52 @@ type LogItem = {
 async function main() {
   const logger = new Logger()
 
-  logger.info(`loading blocklist...`)
+  logger.info('loading data from api...')
   const dataStorage = new Storage(DATA_DIR)
-  const channelsContent = await dataStorage.json('channels.json')
-  const channels = new Collection(channelsContent).map(data => new Channel(data))
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data).withChannel(channelsGroupedById)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
+    feed.channel ? feed.channel.id : uniqueId()
+  )
   const blocklistContent = await dataStorage.json('blocklist.json')
   const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))
 
-  logger.info(`found ${blocklist.count()} records`)
+  const blocklistGroupedByChannelId = blocklist.keyBy((blocked: Blocked) => blocked.channelId)
 
   logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
-  const parser = new PlaylistParser({ storage: streamsStorage })
+  const parser = new PlaylistParser({
+    storage: streamsStorage,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
   const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
   const streams = await parser.parse(files)
 
   logger.info(`found ${streams.count()} streams`)
 
   let errors = new Collection()
   let warnings = new Collection()
-  let groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
-  for (const filepath of groupedStreams.keys()) {
-    const streams = groupedStreams.get(filepath)
+  let streamsGroupedByFilepath = streams.groupBy((stream: Stream) => stream.getFilepath())
+  for (const filepath of streamsGroupedByFilepath.keys()) {
+    const streams = streamsGroupedByFilepath.get(filepath)
     if (!streams) continue
 
     const log = new Collection()
     const buffer = new Dictionary()
     streams.forEach((stream: Stream) => {
-      const invalidId =
-        stream.channel && !channels.first((channel: Channel) => channel.id === stream.channel)
-      if (invalidId) {
-        log.add({
-          type: 'warning',
-          line: stream.line,
-          message: `"${stream.channel}" is not in the database`
-        })
+      if (stream.channelId) {
+        const channel = channelsGroupedById.get(stream.channelId)
+        if (!channel) {
+          log.add({
+            type: 'warning',
+            line: stream.line,
+            message: `"${stream.id}" is not in the database`
+          })
+        }
       }
 
       const duplicate = stream.url && buffer.has(stream.url)
@@ -65,19 +76,19 @@ async function main() {
         buffer.set(stream.url, true)
       }
 
-      const blocked = blocklist.first(blocked => stream.channel === blocked.channel)
+      const blocked = stream.channel ? blocklistGroupedByChannelId.get(stream.channel.id) : false
       if (blocked) {
         if (blocked.reason === 'dmca') {
           log.add({
             type: 'error',
             line: stream.line,
-            message: `"${stream.channel}" is on the blocklist due to claims of copyright holders (${blocked.ref})`
+            message: `"${blocked.channelId}" is on the blocklist due to claims of copyright holders (${blocked.ref})`
           })
         } else if (blocked.reason === 'nsfw') {
           log.add({
             type: 'error',
             line: stream.line,
-            message: `"${stream.channel}" is on the blocklist due to NSFW content (${blocked.ref})`
+            message: `"${blocked.channelId}" is on the blocklist due to NSFW content (${blocked.ref})`
           })
         }
       }
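Note: the blocklist check above swaps a per-stream linear scan (blocklist.first(...)) for one keyBy pass plus constant-time lookups. The same trade sketched with a plain Map and a minimal, assumed Blocked shape:

    // Sketch only: index once, then O(1) get() per stream instead of an O(n) scan.
    interface BlockedLike { channelId: string; reason: 'dmca' | 'nsfw'; ref: string }

    const blocklist: BlockedLike[] = [{ channelId: 'example.us', reason: 'dmca', ref: '#1' }]

    // Old shape: scan the whole list for every stream checked.
    const hitByScan = blocklist.find(blocked => blocked.channelId === 'example.us')

    // New shape: build the index once, then look up by channel id.
    const blocklistGroupedByChannelId = new Map<string, BlockedLike>()
    for (const blocked of blocklist) blocklistGroupedByChannelId.set(blocked.channelId, blocked)
    const hitByKey = blocklistGroupedByChannelId.get('example.us')

    console.log(hitByScan === hitByKey) // true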
@@ -1,154 +1,164 @@
 import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
 import { DATA_DIR, STREAMS_DIR } from '../../constants'
 import { IssueLoader, PlaylistParser } from '../../core'
-import { Blocked, Channel, Issue, Stream } from '../../models'
+import { Blocked, Channel, Issue, Stream, Feed } from '../../models'
+import { uniqueId } from 'lodash'
 
 async function main() {
   const logger = new Logger()
   const loader = new IssueLoader()
+  let report = new Collection()
 
-  const storage = new Storage(DATA_DIR)
-
   logger.info('loading issues...')
   const issues = await loader.load()
 
+  logger.info('loading data from api...')
+  const dataStorage = new Storage(DATA_DIR)
+  const channelsData = await dataStorage.json('channels.json')
+  const channels = new Collection(channelsData).map(data => new Channel(data))
+  const channelsGroupedById = channels.keyBy((channel: Channel) => channel.id)
+  const feedsData = await dataStorage.json('feeds.json')
+  const feeds = new Collection(feedsData).map(data =>
+    new Feed(data).withChannel(channelsGroupedById)
+  )
+  const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) =>
+    feed.channel ? feed.channel.id : uniqueId()
+  )
+  const blocklistContent = await dataStorage.json('blocklist.json')
+  const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))
+  const blocklistGroupedByChannelId = blocklist.keyBy((blocked: Blocked) => blocked.channelId)
+
 logger.info('loading streams...')
   const streamsStorage = new Storage(STREAMS_DIR)
-  const parser = new PlaylistParser({ storage: streamsStorage })
+  const parser = new PlaylistParser({
+    storage: streamsStorage,
+    channelsGroupedById,
+    feedsGroupedByChannelId
+  })
   const files = await streamsStorage.list('**/*.m3u')
   const streams = await parser.parse(files)
   const streamsGroupedByUrl = streams.groupBy((stream: Stream) => stream.url)
-  const streamsGroupedByChannel = streams.groupBy((stream: Stream) => stream.channel)
-
-  logger.info('loading channels from api...')
-  const channelsContent = await storage.json('channels.json')
-  const channelsGroupedById = new Collection(channelsContent)
-    .map(data => new Channel(data))
-    .groupBy((channel: Channel) => channel.id)
-
-  logger.info('loading blocklist from api...')
-  const blocklistContent = await storage.json('blocklist.json')
-  const blocklistGroupedByChannel = new Collection(blocklistContent)
-    .map(data => new Blocked(data))
-    .groupBy((blocked: Blocked) => blocked.channel)
-
-  let report = new Collection()
-
-  logger.info('checking streams:add requests...')
-  const addRequests = issues.filter(issue => issue.labels.includes('streams:add'))
-  const addRequestsBuffer = new Dictionary()
-  addRequests.forEach((issue: Issue) => {
-    const channelId = issue.data.getString('channelId') || undefined
-    const streamUrl = issue.data.getString('streamUrl')
-
-    const result = new Dictionary({
-      issueNumber: issue.number,
-      type: 'streams:add',
-      channelId,
-      streamUrl,
-      status: 'pending'
-    })
-
-    if (!channelId) result.set('status', 'missing_id')
-    else if (!streamUrl) result.set('status', 'missing_link')
-    else if (blocklistGroupedByChannel.has(channelId)) result.set('status', 'blocked')
-    else if (channelsGroupedById.missing(channelId)) result.set('status', 'wrong_id')
-    else if (streamsGroupedByUrl.has(streamUrl)) result.set('status', 'on_playlist')
-    else if (addRequestsBuffer.has(streamUrl)) result.set('status', 'duplicate')
-    else result.set('status', 'pending')
-
-    addRequestsBuffer.set(streamUrl, true)
-
-    report.add(result.data())
-  })
-
-  logger.info('checking streams:edit requests...')
-  const editRequests = issues.filter(issue => issue.labels.find(label => label === 'streams:edit'))
-  editRequests.forEach((issue: Issue) => {
-    const channelId = issue.data.getString('channelId') || undefined
-    const streamUrl = issue.data.getString('streamUrl') || undefined
-
-    const result = new Dictionary({
-      issueNumber: issue.number,
-      type: 'streams:edit',
-      channelId,
-      streamUrl,
-      status: 'pending'
-    })
-
-    if (!streamUrl) result.set('status', 'missing_link')
-    else if (streamsGroupedByUrl.missing(streamUrl)) result.set('status', 'invalid_link')
-    else if (channelId && channelsGroupedById.missing(channelId)) result.set('status', 'invalid_id')
-
-    report.add(result.data())
-  })
+  const streamsGroupedByChannelId = streams.groupBy((stream: Stream) => stream.channelId)
 
   logger.info('checking broken streams reports...')
   const brokenStreamReports = issues.filter(issue =>
-    issue.labels.find(label => label === 'broken stream')
+    issue.labels.find((label: string) => label === 'broken stream')
   )
   brokenStreamReports.forEach((issue: Issue) => {
     const brokenLinks = issue.data.getArray('brokenLinks') || []
 
     if (!brokenLinks.length) {
-      const result = new Dictionary({
+      const result = {
         issueNumber: issue.number,
         type: 'broken stream',
-        channelId: undefined,
+        streamId: undefined,
         streamUrl: undefined,
         status: 'missing_link'
-      })
+      }
 
-      report.add(result.data())
+      report.add(result)
     } else {
       for (const streamUrl of brokenLinks) {
-        const result = new Dictionary({
+        const result = {
           issueNumber: issue.number,
           type: 'broken stream',
-          channelId: undefined,
-          streamUrl: undefined,
+          streamId: undefined,
+          streamUrl: truncate(streamUrl),
           status: 'pending'
-        })
-
-        if (streamsGroupedByUrl.missing(streamUrl)) {
-          result.set('streamUrl', streamUrl)
-          result.set('status', 'wrong_link')
         }
 
-        report.add(result.data())
+        if (streamsGroupedByUrl.missing(streamUrl)) {
+          result.status = 'wrong_link'
+        }
+
+        report.add(result)
       }
     }
   })
 
+  logger.info('checking streams:add requests...')
+  const addRequests = issues.filter(issue => issue.labels.includes('streams:add'))
+  const addRequestsBuffer = new Dictionary()
+  addRequests.forEach((issue: Issue) => {
+    const streamId = issue.data.getString('streamId') || ''
+    const streamUrl = issue.data.getString('streamUrl') || ''
+    const [channelId] = streamId.split('@')
+
+    const result = {
+      issueNumber: issue.number,
+      type: 'streams:add',
+      streamId: streamId || undefined,
+      streamUrl: truncate(streamUrl),
+      status: 'pending'
+    }
+
+    if (!channelId) result.status = 'missing_id'
+    else if (!streamUrl) result.status = 'missing_link'
+    else if (blocklistGroupedByChannelId.has(channelId)) result.status = 'blocked'
+    else if (channelsGroupedById.missing(channelId)) result.status = 'wrong_id'
+    else if (streamsGroupedByUrl.has(streamUrl)) result.status = 'on_playlist'
+    else if (addRequestsBuffer.has(streamUrl)) result.status = 'duplicate'
+    else result.status = 'pending'
+
+    addRequestsBuffer.set(streamUrl, true)
+
+    report.add(result)
+  })
+
+  logger.info('checking streams:edit requests...')
+  const editRequests = issues.filter(issue =>
+    issue.labels.find((label: string) => label === 'streams:edit')
+  )
+  editRequests.forEach((issue: Issue) => {
+    const streamId = issue.data.getString('streamId') || ''
+    const streamUrl = issue.data.getString('streamUrl') || ''
+    const [channelId] = streamId.split('@')
+
+    const result = {
+      issueNumber: issue.number,
+      type: 'streams:edit',
+      streamId: streamId || undefined,
+      streamUrl: truncate(streamUrl),
+      status: 'pending'
+    }
+
+    if (!streamUrl) result.status = 'missing_link'
+    else if (streamsGroupedByUrl.missing(streamUrl)) result.status = 'invalid_link'
+    else if (channelId && channelsGroupedById.missing(channelId)) result.status = 'invalid_id'
+
+    report.add(result)
+  })
+
   logger.info('checking channel search requests...')
   const channelSearchRequests = issues.filter(issue =>
-    issue.labels.find(label => label === 'channel search')
+    issue.labels.find((label: string) => label === 'channel search')
   )
   const channelSearchRequestsBuffer = new Dictionary()
   channelSearchRequests.forEach((issue: Issue) => {
-    const channelId = issue.data.getString('channelId')
+    const streamId = issue.data.getString('channelId') || ''
+    const [channelId] = streamId.split('@')
 
-    const result = new Dictionary({
+    const result = {
       issueNumber: issue.number,
       type: 'channel search',
-      channelId,
+      streamId: streamId || undefined,
       streamUrl: undefined,
       status: 'pending'
-    })
+    }
 
-    if (!channelId) result.set('status', 'missing_id')
-    else if (channelsGroupedById.missing(channelId)) result.set('status', 'invalid_id')
-    else if (channelSearchRequestsBuffer.has(channelId)) result.set('status', 'duplicate')
-    else if (blocklistGroupedByChannel.has(channelId)) result.set('status', 'blocked')
-    else if (streamsGroupedByChannel.has(channelId)) result.set('status', 'fulfilled')
+    if (!channelId) result.status = 'missing_id'
+    else if (channelsGroupedById.missing(channelId)) result.status = 'invalid_id'
+    else if (channelSearchRequestsBuffer.has(channelId)) result.status = 'duplicate'
+    else if (blocklistGroupedByChannelId.has(channelId)) result.status = 'blocked'
+    else if (streamsGroupedByChannelId.has(channelId)) result.status = 'fulfilled'
     else {
       const channelData = channelsGroupedById.get(channelId)
-      if (channelData.length && channelData[0].closed) result.set('status', 'closed')
+      if (channelData.length && channelData[0].closed) result.status = 'closed'
     }
 
     channelSearchRequestsBuffer.set(channelId, true)
 
-    report.add(result.data())
+    report.add(result)
   })
 
   report = report.orderBy(item => item.issueNumber).filter(item => item.status !== 'pending')
@@ -157,3 +167,10 @@ async function main() {
 }
 
 main()
+
+function truncate(string: string, limit: number = 100) {
+  if (!string) return string
+  if (string.length < limit) return string
+
+  return string.slice(0, limit) + '...'
+}
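Note: the new truncate() helper keeps report URLs readable; strings at or over the limit are cut to the first `limit` characters plus an ellipsis, and empty strings pass through untouched. Usage, exactly as the function above defines it:

    // Exercising the truncate() helper added in this hunk.
    console.log(truncate('short-url'))            // 'short-url' (under the 100-char limit)
    console.log(truncate('a'.repeat(120)).length) // 103 (100 characters + '...')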