Mirror of https://github.com/iptv-org/iptv.git (synced 2025-05-13 10:30:03 -04:00)

Merge pull request #14226 from iptv-org/patch-2023.09.2

Patch 2023.09.2

Commit 435c696fc5 — 161 changed files with 13475 additions and 6514 deletions
37  .github/ISSUE_TEMPLATE/-----streams_add.yml  (vendored)

@@ -12,6 +12,12 @@ body:
validations:
required: true

- type: input
attributes:
label: Channel Name
description: "Full name of the channel. May contain any characters except: `,`, `[`, `]`."
placeholder: 'BBC America East'

- type: input
attributes:
label: Stream URL

@@ -28,6 +34,37 @@ body:
- 'Not 24/7'
- 'Geo-blocked'

- type: dropdown
attributes:
label: Quality
description: Maximum video resolution available on the link
options:
- 2160p
- 1280p
- 1080p
- 720p
- 576p
- 480p
- 360p

- type: dropdown
attributes:
label: Label
description: Is there any reason why the broadcast may not work?
options:
- 'Not 24/7'
- 'Geo-blocked'

- type: input
attributes:
label: HTTP User Agent
placeholder: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 Edge/12.246'

- type: input
attributes:
label: HTTP Referrer
placeholder: 'https://example.com/'

- type: textarea
attributes:
label: Notes (optional)
29  .github/ISSUE_TEMPLATE/----streams_edit.yml  (vendored)

@@ -23,6 +23,25 @@ body:
description: Channel ID from [iptv-org.github.io](https://iptv-org.github.io/).
placeholder: 'BBCAmericaEast.us'

- type: input
attributes:
label: Channel Name
description: "Full name of the channel. May contain any characters except: `,`, `[`, `]`."
placeholder: 'BBC America East'

- type: dropdown
attributes:
label: Quality
description: Maximum video resolution available on the link
options:
- 2160p
- 1280p
- 1080p
- 720p
- 576p
- 480p
- 360p

- type: dropdown
attributes:
label: Label

@@ -31,6 +50,16 @@ body:
- 'Not 24/7'
- 'Geo-blocked'

- type: input
attributes:
label: HTTP User Agent
placeholder: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 Edge/12.246'

- type: input
attributes:
label: HTTP Referrer
placeholder: 'https://example.com/'

- type: textarea
attributes:
label: Notes
42  .github/ISSUE_TEMPLATE/---streams-remove.yml  (vendored, new file)

@@ -0,0 +1,42 @@
name: 🗑 Remove stream link
description: Request to remove a stream link from the playlist
title: 'Remove: '
labels: ['streams:remove']

body:
  - type: input
    attributes:
      label: Stream URL
      description: Link to the stream from a playlist
      placeholder: 'https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
    validations:
      required: true

  - type: dropdown
    attributes:
      label: Reason
      options:
        - Not loading
        - Constantly interrupts/lagging
        - Stuck at a single frame
        - Visual artifacts
        - Shows looped video
        - No sound
        - Displays a message asking to renew subscription
        - Duplicate
        - Other
    validations:
      required: true

  - type: textarea
    attributes:
      label: Notes (optional)
      placeholder: 'Anything else we should know?'

  - type: checkboxes
    attributes:
      label: Contributing Guide
      description: 'Please read this guide before posting your request'
      options:
        - label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
          required: true
@@ -49,4 +49,4 @@ body:
description: 'Please read this guide before posting your request'
options:
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
required: true
required: true
11  .github/ISSUE_TEMPLATE/--bug-report.yml  (vendored)

@@ -1,6 +1,5 @@
name: 🐞 Bug Report
description: Report an error in this repository
title: 'Fix: '
labels: ['bug']

body:

@@ -9,4 +8,12 @@ body:
label: Describe your issue
description: Please describe the error in as much detail as possible so that we can fix it quickly.
validations:
required: true
required: true

- type: checkboxes
attributes:
label: Contributing Guide
description: 'Please read this guide before posting your request'
options:
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
required: true
@@ -1,13 +1,7 @@
name: ✂️ Removal request
name: ©️ Copyright removal request
description: Request to remove content
title: 'Remove: '
labels: ['removal request']
body:
- type: markdown
attributes:
value: |
This form is only for requests from the copyright owner or an agent authorized to act on behalf of the copyright owner. If you're experiencing problems viewing a channel please fill a [Broken Stream](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=broken+stream&template=-----broken-stream.yml&title=Fix%3A+) form instead.

- type: input
attributes:
label: Your full legal name
8  .github/ISSUE_TEMPLATE/config.yml  (vendored)

@@ -1,11 +1,11 @@
blank_issues_enabled: false
blank_issues_enabled: true
contact_links:
- name: 🔍 Looking for a channel
url: https://github.com/orgs/iptv-org/discussions/new/choose
url: https://github.com/orgs/iptv-org/discussions/categories/channel-search
about: Need help finding a link to a channel stream
- name: 💡 Feature request
url: https://github.com/orgs/iptv-org/discussions/new/choose
url: https://github.com/orgs/iptv-org/discussions/categories/ideas
about: For any ideas or feature requests
- name: ❓ Ask a question
url: https://github.com/orgs/iptv-org/discussions/new/choose
url: https://github.com/orgs/iptv-org/discussions/categories/q-a
about: Ask questions about this project
50  .github/workflows/update.yml  (vendored)

@@ -2,7 +2,7 @@ name: update
on:
workflow_dispatch:
schedule:
- cron: '0 */6 * * *'
- cron: '0 0 * * *'
jobs:
main:
runs-on: ubuntu-latest

@@ -10,7 +10,7 @@ jobs:
contents: write
steps:
- uses: actions/checkout@v3
- uses: tibdex/github-app-token@v1
- uses: getsentry/action-github-app-token@v2
if: ${{ !env.ACT }}
id: create-app-token
with:

@@ -20,7 +20,12 @@ jobs:
if: ${{ !env.ACT }}
with:
token: ${{ steps.create-app-token.outputs.token }}
- name: setup git
run: |
git config user.name "iptv-bot[bot]"
git config user.email "84861620+iptv-bot[bot]@users.noreply.github.com"
- uses: actions/setup-node@v3
if: ${{ !env.ACT }}
with:
node-version: 18
cache: 'npm'

@@ -28,31 +33,36 @@ jobs:
run: npm install
- name: load api data
run: npm run api:load
- name: validate playlists
- name: setup database
run: npm run db:create
- name: update internal playlists
run: npm run playlist:update --silent >> $GITHUB_OUTPUT
id: playlist-update
- name: check internal playlists
run: |
npm run playlist:lint
npm run playlist:validate
- name: setup database
run: npm run db:create
- name: generate playlists
- name: generate public playlists
run: npm run playlist:generate
- name: generate streams.json
- name: generate .api/streams.json
run: npm run api:generate
- name: update readme.md
run: npm run readme:update
- name: commit changes
uses: stefanzweifel/git-auto-commit-action@v4
- run: git status
- name: commit changes to /streams
run: |
git add streams
git status
git commit -m "[Bot] Update /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." -m "${{ steps.playlist-update.outputs.OUTPUT }}" --no-verify
- name: commit changes to readme.md
run: |
git add README.md
git status
git commit -m "[Bot] Update README.md" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." --no-verify
- name: push all changes to the repository
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
with:
commit_message: "[Bot] Update README.md"
branch: master
commit_options: '--no-verify'
file_pattern: README.md
repository: .
commit_user_name: iptv-bot[bot]
commit_user_email: 84861620+iptv-bot[bot]@users.noreply.github.com
commit_author: iptv-bot[bot] <84861620+iptv-bot[bot]@users.noreply.github.com>
- name: deploy to github pages
run: git push
- name: deploy public playlists to github pages
uses: JamesIves/github-pages-deploy-action@4.1.1
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
with:

@@ -64,7 +74,7 @@ jobs:
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
commit-message: '[Bot] Deploy to GitHub Pages'
clean: true
- name: deploy to iptv-org/api
- name: move .api/streams.json to iptv-org/api
uses: JamesIves/github-pages-deploy-action@4.1.1
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
with:
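The reworked `update internal playlists` step pipes the script's stdout into `$GITHUB_OUTPUT`, so any `KEY=value` line it prints becomes a step output that the later `commit changes to /streams` step reads as `${{ steps.playlist-update.outputs.OUTPUT }}`. A minimal TypeScript sketch of the producing side, mirroring what `scripts/commands/playlist/update.ts` does further down in this diff (the issue numbers here are made up for illustration):

```ts
// Sketch: emit a value that the workflow above picks up as a step output.
// The step runs `npm run playlist:update --silent >> $GITHUB_OUTPUT`, so a
// line of the form "OUTPUT=..." becomes steps.playlist-update.outputs.OUTPUT.
const processedIssues: number[] = [14201, 14207] // hypothetical issue numbers

const output = processedIssues.map(issue => `closes #${issue}`).join(', ')
console.log(`OUTPUT=${output}`) // -> OUTPUT=closes #14201, closes #14207
```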
3  .gitignore  (vendored)

@@ -5,4 +5,5 @@ node_modules
.DS_Store
.gh-pages
.api
.env
.env
/temp
154  CONTRIBUTING.md

@@ -1,89 +1,68 @@
# Contributing Guide

- [Issue Reporting Guidelines](#issue-reporting-guidelines)
- [Pull Request Guidelines](#pull-request-guidelines)
- [How to?](#how-to)
- [Stream Description Scheme](#stream-description-scheme)
- [Project Structure](#project-structure)
- [Scripts](#scripts)
- [Workflows](#workflows)

## Issue Reporting Guidelines
## How to?

### Add stream link
### How to add a new stream link to a playlist?

Before posting your request, make sure that:
You have several options:

- Channel ID is valid. A complete list of all supported channels and their IDs can be found on [iptv-org.github.io](https://iptv-org.github.io/).
- The link you want to add works stably. To check this, open it in one of the players (for example, [VLC player](https://www.videolan.org/vlc/index.html)) and watch the broadcast for at least a minute (some test streams are interrupted after 15-30 seconds).
- The link is not already in the playlist. This can be done by [searching](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) the repository.
- The link does not lead to Xtream Codes server. [Why don't you accept links to Xtream Codes server?](FAQ.md#why-dont-you-accept-links-to-xtream-codes-server)
1. Create a new [issue](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=streams:add&projects=&template=-----streams_add.yml&title=Add%3A+) and provide all the required information. If the request is approved, the link will be added to the playlist in the next update.

2. Add the link to the playlist directly using a [pull request](https://github.com/iptv-org/iptv/pulls).

Regardless of which option you choose, before posting your request please do the following:

- Make sure the link you want to add works stably. To check this, open it in one of the players (for example, [VLC player](https://www.videolan.org/vlc/index.html)) and watch the broadcast for at least a minute (some test streams are interrupted after 15-30 seconds).
- Make sure the link is not already in the playlist. This can be done by [searching](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) the repository.
- Find the ID of the channel you want to add in our [database](https://iptv-org.github.io/). If this particular channel is not in the database, then leave a request to add it [here](https://github.com/iptv-org/database/issues/new/choose) and wait until it is approved before continuing.
- Make sure the channel is not blacklisted. This can be done by checking the [blocklist.csv](https://github.com/iptv-org/database/blob/master/data/blocklist.csv) file.
- The link does not lead to the Xtream Codes server. [Why don't you accept links to Xtream Codes server?](FAQ.md#why-dont-you-accept-links-to-xtream-codes-server)
- If you know that the broadcast only works in certain countries or it is periodically interrupted, do not forget to indicate this in the request.

An issue without a valid channel ID or working link to the stream will be closed immediately.
A request without a valid channel ID or working link to the stream will be closed immediately.

### Edit stream description
Note all links in playlists are sorted automatically by scripts so there is no need to sort them manually. For more info, see [Scripts](#scripts).

Before posting your request, make sure that:
### How to add a link to YouTube live?

- The link is still in our playlists. This can be verified by [searching](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) the repository.
You can use one of the services like [abskmj/youtube-hls-m3u8](https://github.com/abskmj/youtube-hls-m3u8) that allow you to create a permanent link to the broadcast that can be opened in most players.

### How to distinguish a link to an Xtream Codes server from a regular one?

Most of them have this form:

`http(s)://{hostname}:{port}/{username}/{password}/{channelID}` (port is often `25461`)

To make sure that the link leads to the Xtream Codes server, copy the `hostname`, `port`, `username` and `password` into the link below and try to open it in a browser:

`http(s)://{hostname}:{port}/panel_api.php?username={username}&password={password}`

If the link responds, you are dealing with an Xtream Codes server.
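A quick way to automate the manual check just described is to build the `panel_api.php` URL from the parts of the suspect stream link and see whether it answers. A minimal TypeScript sketch under stated assumptions: the host, port and credentials below are placeholders, and it relies on the global `fetch` available in Node 18+.

```ts
// Sketch only: probe a suspected Xtream Codes server, following the manual
// check described above. All values passed in are hypothetical examples.
async function looksLikeXtreamCodes(
  hostname: string,
  port: string,
  username: string,
  password: string
): Promise<boolean> {
  const panelUrl = `http://${hostname}:${port}/panel_api.php?username=${username}&password=${password}`
  try {
    const response = await fetch(panelUrl) // Node 18+ global fetch
    return response.ok // a responding panel endpoint is a strong hint it is Xtream Codes
  } catch {
    return false
  }
}

// Parts taken from a stream URL of the form
// http://{hostname}:{port}/{username}/{password}/{channelID}
looksLikeXtreamCodes('example.com', '25461', 'user', 'pass').then(console.log)
```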
### How to report a broken stream?

Fill out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=broken+stream&projects=&template=---broken-stream.yml&title=Broken%3A+) and as soon as a working replacement appears, we will add it to the playlist or at least remove the non-working one.

The only thing before publishing your report is to make sure that:

- The link is still in our playlists. You can verify this by [searching](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) the repository.
- The link really doesn't work and is not just [geo-blocked](https://en.wikipedia.org/wiki/Geo-blocking). To check this, you can either use a [VPN](https://en.wikipedia.org/wiki/Virtual_private_network) or services such as [streamtest.in](https://streamtest.in/).

An issue without a valid link will be closed immediately.

### Report broken link
### How do I remove my channel from the playlist?

Before posting your report, make sure that:
To request removal of a link to a channel from the repository, you need to fill out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=removal+request&projects=&template=-removal-request.yml&title=Remove%3A+) and wait for the request to be reviewed (this usually takes no more than 1 business day). If the request is approved, links to the channel will be removed from the repository immediately.

- The link is still in our playlists. This can be verified by [searching](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) the repository.
- The link is not blocked in your country. To check this, you can use either a [VPN](https://en.wikipedia.org/wiki/Virtual_private_network) or services such as [streamtest.in](https://streamtest.in/).
The channel will also be added to our [blocklist](https://github.com/iptv-org/database/blob/master/data/blocklist.csv) to avoid its appearance in our playlists in the future.

An issue should contain a report for only one channel, otherwise it will be closed immediately.

### Bug report

Please use this form only if you have found a bug in one of the scripts or the repository as a whole. To report a broken link or an error in the stream description, use one of the methods described above.

### Removal request

To request the removal of a link to a channel from the repository, you need to fill out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=removal+request&projects=&template=-removal-request.yml&title=Remove%3A+) and if your request is approved the link will be removed within 1 business day. The channel will also be added to our [blocklist](https://github.com/iptv-org/database/blob/master/data/blocklist.csv) to avoid its appearance in our playlists in the future.

Please keep in mind that we only accept removal requests from channel copyright holders and their official representatives, any other requests will be closed immediately.

## Pull Request Guidelines

### Add stream link

If you want to add a new stream link to playlists, please do the following:

- Make sure that the link you want to add works stably. To do this, open it in one of the players (for example, [VLC player](https://www.videolan.org/vlc/index.html)) and watch the broadcast for at least a minute (some test streams are interrupted after 15-30 seconds).
- Make sure the link does not lead to Xtream Codes server. [How to distinguish a link to an Xtream Codes server from a regular one?](FAQ.md#how-to-distinguish-a-link-to-an-xtream-codes-server-from-a-regular-one)
- Find in our [database](https://iptv-org.github.io/) the ID of the channel you want to add. If this particular channel is not in the database, then first leave a request to add it [here](https://github.com/iptv-org/database/issues/new/choose) and once the request is approved, you can proceed further.
- Then open the [/streams](/streams) folder and select the file corresponding to the country of this channel (for example, for `TF1.fr` it will be `fr.m3u`) and then insert the description of the stream and a link to it at the very end of the file. For more info, see [Stream Description Scheme](#stream-description-scheme).
- If you know that the broadcast only works in certain countries, do not forget to add the `[Geo-blocked]` label to the stream description.
- For broadcasts that may be periodically interrupted, there is the label `[Not 24/7]`.
- Finally, commit all changes and submit a pull request.

If the request is approved by other community members, then the link will appear in the playlist on the next update.

### Remove broken link

If you find a link in the playlist that does not work, follow the steps below:

- Verify that the link is indeed not working and has not just been [geo-blocked](https://en.wikipedia.org/wiki/Geo-blocking). To do this, you can either use a [VPN](https://en.wikipedia.org/wiki/Virtual_private_network) or services such as [streamtest.in](https://streamtest.in/).
- If the link works, but only when using a VPN, then tag it with [Geo-blocked]. For more info, see [Stream Description Scheme](#stream-description-scheme).
- If it turns out that the link works but not 24/7, then add the [Not 24/7] label to it.
- If the link is still not working, then continue.
- Use a [search](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) to find which file this link is stored in, open it and delete the link along with the description.
- Commit the changes and make a pull request.

### Update README.md

- Open `.readme/template.md`.
- Make the necessary changes.
- Commit all changes and send a pull request.

### Update this Guide

- Open `.github/CONTRIBUTING.md`.
- Make the necessary changes.
- Commit all changes and send a pull request.
Please note that we only accept removal requests from channel owners and their official representatives, all other requests will be closed immediately.

## Stream Description Scheme
@@ -97,7 +76,7 @@ STREAM_URL
| Attribute | Description | Required | Valid values |
| -------------- | ------------------------------------------------------------------------------------------ | -------- | -------------------------------------------------------------------------------------------------------------------------- |
| `CHANNEL_ID` | Channel ID. | Optional | Full list of supported channels with corresponding ID could be found on [iptv-org.github.io](https://iptv-org.github.io/). |
| `CHANNEL_NAME` | Full name of the channel. May contain any characters except: `,`, `(`, `)`, `[`, `]`. | Required | - |
| `CHANNEL_NAME` | Full name of the channel. May contain any characters except: `,`, `[`, `]`. | Required | - |
| `RESOLUTION` | Maximum stream resolution | Optional | `2160p`, `1080p`, `720p`, `480p`, `360p` etc. |
| `LABEL` | Specified in cases where the broadcast for some reason may not be available to some users. | Optional | `Geo-blocked` or `Not 24/7` |
| `STREAM_URL` | Stream URL. | Required | - |

@@ -109,7 +88,7 @@ Example:
https://example.com/playlist.m3u8
```

Also, if necessary, you can specify a custom HTTP User-Agent and Referrer via the `#EXTVLCOPT` tag:
Also, if necessary, you can specify a custom [HTTP User-Agent](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent) and [Referrer](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referer) via the `#EXTVLCOPT` tag:

```xml
#EXTINF:-1 tvg-id="ExampleTV.us",Example TV
@@ -134,3 +113,40 @@ http://example.com/stream.m3u8
- `tests/`: contains tests to check the scripts.
- `CONTRIBUTING.md`: file you are currently reading.
- `README.md`: project description generated from the contents of the `.readme/` folder.

## Scripts

These scripts are created to automate routine processes in the repository and make it a bit easier to maintain.

For scripts to work, you must have [Node.js](https://nodejs.org/en) installed on your computer.

To run scripts use the `npm run <script-name>` command.

- `act:check`: allows you to run the [check](https://github.com/iptv-org/iptv/blob/master/.github/workflows/check.yml) workflow locally. Depends on [nektos/act](https://github.com/nektos/act).
- `act:update`: allows you to test the [update](https://github.com/iptv-org/iptv/blob/master/.github/workflows/update.yml) workflow locally. Depends on [nektos/act](https://github.com/nektos/act).
- `api:load`: downloads the latest channel and stream data from [iptv-org/api](https://github.com/iptv-org/api).
- `api:generate`: generates a JSON file with all streams for the [iptv-org/api](https://github.com/iptv-org/api) repository.
- `api:deploy`: allows you to manually upload a JSON file created via `api:generate` to the [iptv-org/api](https://github.com/iptv-org/api) repository. To run the script you must provide your [personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) with write access to the repository.
- `db:create`: creates a temporary file `temp/database/streams.db` containing all links from the [/streams](/streams) folder.
- `playlist:update`: triggers an update of internal playlists. The process involves processing approved requests from issues, URL normalization, and sorting links by channel name, quality, and label.
- `playlist:validate`: checks IDs and links in internal playlists for errors.
- `playlist:lint`: checks internal playlists for syntax errors.
- `playlist:generate`: generates all public playlists.
- `playlist:deploy`: allows you to manually publish all playlists generated via `playlist:generate`. To run the script you must provide your [personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) with write access to the repository.
- `readme:update`: updates the list of playlists in [README.md](README.md).
- `report:create`: shows a list of all current requests and their status.
- `format`: (shorthand) sequentially runs the `db:create` and `playlist:format` commands.
- `check`: (shorthand) sequentially runs the `api:load`, `playlist:lint` and `playlist:validate` commands.
- `update`: (shorthand) sequentially runs the `api:load`, `db:create`, `playlist:generate`, `api:generate` and `readme:update` commands.
- `deploy`: (shorthand) sequentially runs the `playlist:deploy` and `api:deploy` commands.
- `report`: (shorthand) sequentially runs the `api:load` and `report:create` commands.
- `test`: runs a test of all the scripts described above.

## Workflows

To automate running the scripts described above, we use [GitHub Actions workflows](https://docs.github.com/en/actions/using-workflows).

Each workflow includes its own set of scripts that can be run either manually or in response to an event.

- `check`: sequentially runs the `playlist:lint` and `playlist:validate` scripts when a new pull request appears, and blocks the merge if it detects an error in it.
- `update`: every day at 0:00 UTC sequentially runs the `api:load`, `db:create`, `playlist:update`, `playlist:lint`, `playlist:validate`, `playlist:generate`, `api:generate` and `readme:update` scripts and deploys the output files if successful.
28  FAQ.md

@@ -6,18 +6,6 @@ Start by asking our community for help via [Discussions](https://github.com/orgs
But keep in mind that not all TV channels are available for viewing online, and in this case there is little we can do about it.

### How can I add a stream to playlists?

You have several options:

1. Create a new [issue](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=streams:add&projects=&template=-----streams_add.yml&title=Add%3A+) with a valid channel ID and a link to the stream. If the request is approved, the link will be added to the playlist in the next update. For more info, see [Issue Reporting Guidelines](CONTRIBUTING.md#issue-reporting-guidelines).

2. Or you can add the link to the playlist directly via a pull request. For more info, see [Pull Request Guidelines](CONTRIBUTING.md#pull-request-guidelines).

### How to report a broken stream?

Fill out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=broken+stream&projects=&template=---broken-stream.yml&title=Broken%3A+) and as soon as there is a working replacement, we will add it to the playlist or at least remove the non-working one.

### Does the playlist have a channel guide?

Yes. See [iptv-org/epg](https://github.com/iptv-org/epg) for more info.

@@ -30,10 +18,6 @@ No.
The site contains a list of all TV channels in the world and only those of them for which we have working stream links are included in the playlists.

### How can I add a link to YouTube live?

Since not all players allow you to open links to YouTube directly, we also cannot add them to playlists yet. However, some services like [abskmj/youtube-hls-m3u8](https://github.com/abskmj/youtube-hls-m3u8) allow you to get around this limitation by creating permalinks to the feed that can be played as normal. And these are the kind of links you can add to the playlist.

### Can I add a radio broadcast?

Yes, if it is a [visual radio](https://en.wikipedia.org/wiki/Visual_radio) in which a video and audio are shown at the same time.

@@ -41,15 +25,3 @@ Yes, if it is a [visual radio](https://en.wikipedia.org/wiki/Visual_radio) in wh
### Why don't you accept links to Xtream Codes server?

Xtream Codes streams tend to be very unstable, and often links to them fail very quickly, so it's easier for us to initially exclude them from the playlist than to search for expired ones every day.

### How to distinguish a link to an Xtream Codes server from a regular one?

Most of them have this form:

`http(s)://{hostname}:{port}/{username}/{password}/{channelID}` (port is often `25461`)

To make sure that the link leads to the Xtream Codes server, copy the `hostname`, `port`, `username` and `password` into the link below and try to open it in a browser:

`http(s)://{hostname}:{port}/panel_api.php?username={username}&password={password}`

If the link responds, you are dealing with an Xtream Codes server.
6608  package-lock.json  (generated) — diff suppressed because it is too large
61  package.json

@@ -4,52 +4,69 @@
"act:check": "act pull_request -W .github/workflows/check.yml",
"act:update": "act workflow_dispatch -W .github/workflows/update.yml",
"api:load": "./scripts/commands/api/load.sh",
"api:generate": "node scripts/commands/api/generate.js",
"api:generate": "npm run ts-node scripts/commands/api/generate.ts",
"api:deploy": "npx gh-pages-clean && npx gh-pages -a -m \"Deploy to iptv-org/api\" -d .api -r https://$GITHUB_TOKEN@github.com/iptv-org/api.git",
"db:create": "node scripts/commands/database/create.js",
"playlist:format": "node scripts/commands/playlist/format.js",
"playlist:generate": "node scripts/commands/playlist/generate.js",
"playlist:validate": "node scripts/commands/playlist/validate.js",
"db:create": "npm run ts-node scripts/commands/database/create.ts",
"playlist:update": "npm run ts-node scripts/commands/playlist/update.ts",
"playlist:generate": "npm run ts-node scripts/commands/playlist/generate.ts",
"playlist:validate": "npm run ts-node scripts/commands/playlist/validate.ts",
"playlist:lint": "npx m3u-linter -c m3u-linter.json",
"playlist:deploy": "npx gh-pages-clean && npx gh-pages -m \"Deploy to GitHub Pages\" -d .gh-pages -r https://$GITHUB_TOKEN@github.com/iptv-org/iptv.git",
"readme:update": "node scripts/commands/readme/update.js",
"report:create": "node scripts/commands/report/create.js",
"readme:update": "npm run ts-node scripts/commands/readme/update.ts",
"report:create": "npm run ts-node scripts/commands/report/create.ts",
"format": "npm run db:create && npm run playlist:format",
"check": "npm run api:load && npm run playlist:lint && npm run playlist:validate",
"update": "npm run api:load && npm run db:create && npm run playlist:generate && npm run api:generate && npm run readme:update",
"deploy": "npm run playlist:deploy && npm run api:deploy",
"report": "npm run api:load && npm run report:create",
"test": "jest --runInBand"
"test": "jest --runInBand",
"ts-node": "node --no-warnings=ExperimentalWarning --experimental-specifier-resolution=node --loader ts-node/esm"
},
"jest": {
"testRegex": "tests/(.*?/)?.*test.js$",
"transform": {
"^.*test.(ts|js)$": "ts-jest"
},
"testRegex": "tests/(.*?/)?.*test.(ts|js)$",
"setupFilesAfterEnv": [
"@alex_neo/jest-expect-message"
"jest-expect-message"
]
},
"signale": {
"displayLabel": false,
"displayScope": false,
"displayBadge": false
},
"author": "Arhey",
"private": true,
"license": "MIT",
"type": "module",
"dependencies": {
"@alex_neo/jest-expect-message": "^1.0.5",
"@octokit/core": "^4.2.1",
"@octokit/plugin-paginate-rest": "^7.0.0",
"@octokit/plugin-paginate-rest": "^7.1.2",
"@octokit/plugin-rest-endpoint-methods": "^7.1.3",
"@octokit/types": "^11.1.0",
"@seald-io/nedb": "^4.0.2",
"@types/fs-extra": "^11.0.1",
"@types/glob": "^8.1.0",
"@types/jest": "^29.5.4",
"@types/jest-expect-message": "^1.1.0",
"@types/lodash": "^4.14.198",
"@types/signale": "^1.4.4",
"chalk": "^4.1.2",
"commander": "^8.3.0",
"dayjs": "^1.10.7",
"fs-extra": "^10.0.0",
"fs-extra": "^11.1.1",
"gh-pages": "^5.0.0",
"iptv-checker": "^0.26.0",
"iptv-playlist-parser": "^0.12.1",
"jest": "^27.5.1",
"jest-expect-message": "^1.0.2",
"glob": "^10.3.4",
"iptv-playlist-parser": "^0.12.3",
"jest-expect-message": "^1.1.3",
"lodash": "^4.17.21",
"m3u-linter": "^0.3.0",
"markdown-include": "^0.4.3",
"natural-orderby": "^2.0.3",
"nedb-promises": "^5.0.2",
"normalize-url": "^6.1.0",
"natural-orderby": "^3.0.2",
"normalize-url": "^7.2.0",
"signale": "^1.4.0",
"transliteration": "^2.2.0"
"transliteration": "^2.3.5",
"ts-jest": "^29.1.1",
"typescript": "^5.2.2"
}
}

@@ -1,28 +0,0 @@
const { logger, db, file } = require('../../core')
const _ = require('lodash')

const PUBLIC_DIR = process.env.PUBLIC_DIR || '.api'

async function main() {
  logger.info(`loading streams...`)
  await db.streams.load()

  let streams = await db.streams.find({})
  streams = _.sortBy(streams, 'channel')
  streams = streams.map(stream => {
    let data = {
      channel: stream.channel,
      url: stream.url,
      http_referrer: stream.http_referrer,
      user_agent: stream.user_agent
    }

    return data
  })
  logger.info(`found ${streams.length} streams`)

  logger.info('saving to .api/streams.json...')
  await file.create(`${PUBLIC_DIR}/streams.json`, JSON.stringify(streams))
}

main()
25  scripts/commands/api/generate.ts  (new file)

@@ -0,0 +1,25 @@
import { API_DIR, DB_DIR } from '../../constants'
import { Logger, Database, Collection, Storage } from '../../core'
import { Stream } from '../../models'

async function main() {
  const logger = new Logger()

  logger.info(`loading streams...`)
  const db = new Database(DB_DIR)
  const dbStreams = await db.load('streams.db')
  const docs = await dbStreams.find({})

  const streams = new Collection(docs as any[])
    .map(data => new Stream(data))
    .orderBy((stream: Stream) => stream.channel)
    .map((stream: Stream) => stream.toJSON())

  logger.info(`found ${streams.count()} streams`)

  logger.info('saving to .api/streams.json...')
  const storage = new Storage(API_DIR)
  await storage.save('streams.json', streams.toJSON())
}

main()
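For reference, the deleted JavaScript version above wrote one record per stream with the fields `channel`, `url`, `http_referrer` and `user_agent`. Assuming `Stream.toJSON()` (not shown in this diff) keeps the same shape, each entry of `.api/streams.json` looks roughly like this sketch:

```ts
// Sketch of a single .api/streams.json record, based on the fields the old
// generate.js emitted; the TypeScript Stream model is assumed to match.
interface ApiStreamRecord {
  channel: string | null // channel ID such as "BBCAmericaEast.us", or null if unknown
  url: string // stream URL
  http_referrer: string | null
  user_agent: string | null
}
```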
@@ -1,11 +1,11 @@
#!/bin/bash

mkdir -p scripts/tmp/data
curl -L -o scripts/tmp/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
curl -L -o scripts/tmp/data/categories.json https://iptv-org.github.io/api/categories.json
curl -L -o scripts/tmp/data/channels.json https://iptv-org.github.io/api/channels.json
curl -L -o scripts/tmp/data/streams.json https://iptv-org.github.io/api/streams.json
curl -L -o scripts/tmp/data/countries.json https://iptv-org.github.io/api/countries.json
curl -L -o scripts/tmp/data/languages.json https://iptv-org.github.io/api/languages.json
curl -L -o scripts/tmp/data/regions.json https://iptv-org.github.io/api/regions.json
curl -L -o scripts/tmp/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
mkdir -p temp/data
curl -L -o temp/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
curl -L -o temp/data/categories.json https://iptv-org.github.io/api/categories.json
curl -L -o temp/data/channels.json https://iptv-org.github.io/api/channels.json
curl -L -o temp/data/streams.json https://iptv-org.github.io/api/streams.json
curl -L -o temp/data/countries.json https://iptv-org.github.io/api/countries.json
curl -L -o temp/data/languages.json https://iptv-org.github.io/api/languages.json
curl -L -o temp/data/regions.json https://iptv-org.github.io/api/regions.json
curl -L -o temp/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json

@@ -1,40 +0,0 @@
const { db, file, parser, store, logger } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')

const options = program
  .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
  .parse(process.argv)
  .opts()

async function main() {
  logger.info(`looking for streams...`)
  const streams = []
  const files = await file.list(`${options.inputDir}/**/*.m3u`)
  for (const filepath of files) {
    const playlist = await parser.parsePlaylist(filepath)
    for (const item of playlist.items) {
      item.filepath = filepath

      const stream = store.create()

      stream.set('channel', item.tvg.id)
      stream.set('title', item.name)
      stream.set('filepath', item.filepath)
      stream.set('url', item.url)
      stream.set('http_referrer', item.http.referrer)
      stream.set('user_agent', item.http['user-agent'])

      streams.push(stream)
    }
  }
  logger.info(`found ${streams.length} streams`)

  logger.info('saving to the database...')
  await db.streams.load()
  await db.streams.reset()
  const data = streams.map(stream => stream.data())
  await db.streams.insert(data)
}

main()
33  scripts/commands/database/create.ts  (new file)

@@ -0,0 +1,33 @@
import { Storage, Logger, PlaylistParser, Collection, Database } from '../../core'
import { Stream, Playlist } from '../../models'
import { STREAMS_DIR, DB_DIR } from '../../constants'

async function main() {
  const logger = new Logger()

  logger.info(`looking for streams...`)
  const storage = new Storage(STREAMS_DIR)
  const parser = new PlaylistParser({
    storage
  })
  const files = await storage.list(`**/*.m3u`)
  let streams = new Collection()
  for (let filepath of files) {
    const playlist: Playlist = await parser.parse(filepath)
    streams = streams.concat(playlist.streams)
  }

  logger.info(`found ${streams.count()} streams`)

  logger.info('clean up the storage...')
  const dbStorage = new Storage(DB_DIR)
  await dbStorage.clear('streams.db')

  logger.info('saving streams to the database...')
  const db = new Database(DB_DIR)
  const dbStreams = await db.load('streams.db')
  const data = streams.map((stream: Stream) => stream.data()).all()
  await dbStreams.insert(data)
}

main()

@@ -1,33 +0,0 @@
const { create: createPlaylist } = require('../../core/playlist')
const { normalize: normalizeUrl } = require('../../core/url')
const { db, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')

async function main() {
  logger.info('loading streams...')
  await db.streams.load()
  let streams = await db.streams.find({})

  streams = streams.map(stream => {
    stream.url = normalizeUrl(stream.url)

    return stream
  })

  logger.info('sorting links...')
  streams = orderBy(
    streams,
    ['channel', s => (s.channel ? '' : s.title), 'url'],
    ['asc', 'asc', 'asc']
  )

  logger.info('saving...')
  const files = _.groupBy(streams, 'filepath')
  for (const filepath in files) {
    const playlist = createPlaylist(files[filepath], { public: false })
    await file.create(filepath, playlist.toString())
  }
}

main()

@@ -1,76 +0,0 @@
const { db, generator, api, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')

async function main() {
  const streams = await loadStreams()

  logger.info('generating categories/...')
  await generator.generate('categories', streams)
  logger.info('generating countries/...')
  await generator.generate('countries', streams)
  logger.info('generating languages/...')
  await generator.generate('languages', streams)
  logger.info('generating regions/...')
  await generator.generate('regions', streams)
  logger.info('generating index.category.m3u...')
  await generator.generate('index_category_m3u', streams)
  logger.info('generating index.country.m3u...')
  await generator.generate('index_country_m3u', streams)
  logger.info('generating index.language.m3u...')
  await generator.generate('index_language_m3u', streams)
  logger.info('generating index.m3u...')
  await generator.generate('index_m3u', streams)
  logger.info('generating index.nsfw.m3u...')
  await generator.generate('index_nsfw_m3u', streams)
  logger.info('generating index.region.m3u...')
  await generator.generate('index_region_m3u', streams)
}

main()

async function loadStreams() {
  await db.streams.load()
  let streams = await db.streams.find({})
  streams = orderBy(streams, ['channel', 'url'], ['asc', 'asc'])
  streams = _.uniqBy(streams, stream => stream.channel || _.uniqueId())

  await api.channels.load()
  let channels = await api.channels.all()
  channels = _.keyBy(channels, 'id')

  await api.categories.load()
  let categories = await api.categories.all()
  categories = _.keyBy(categories, 'id')

  await api.languages.load()
  let languages = await api.languages.all()
  languages = _.keyBy(languages, 'code')

  streams = streams.map(stream => {
    const channel = channels[stream.channel] || null
    const filename = file.getFilename(stream.filepath)
    const [_, code] = filename.match(/^([a-z]{2})(_|$)/) || [null, null]
    const defaultBroadcastArea = code ? [`c/${code.toUpperCase()}`] : []

    if (channel) {
      stream.categories = channel.categories.map(id => categories[id]).filter(i => i)
      stream.languages = channel.languages.map(id => languages[id]).filter(i => i)
      stream.broadcast_area = channel.broadcast_area
      stream.is_nsfw = channel.is_nsfw
      stream.logo = channel.logo
    } else {
      stream.categories = []
      stream.languages = []
      stream.broadcast_area = defaultBroadcastArea
      stream.is_nsfw = false
      stream.logo = null
    }

    return stream
  })

  streams = orderBy(streams, ['title'], ['asc'])

  return streams
}
148  scripts/commands/playlist/generate.ts  (new file)

@@ -0,0 +1,148 @@
import { File, Storage } from '../../core'
import { Stream, Category, Channel, Language, Country, Region, Subdivision } from '../../models'
import { Database } from '../../core/database'
import { Collection } from '../../core/collection'
import { Logger } from '../../core/logger'
import _ from 'lodash'
import {
  CategoriesGenerator,
  CountriesGenerator,
  LanguagesGenerator,
  RegionsGenerator,
  IndexGenerator,
  IndexNsfwGenerator,
  IndexCategoryGenerator,
  IndexCountryGenerator,
  IndexLanguageGenerator,
  IndexRegionGenerator
} from '../../generators'
import { DATA_DIR, DB_DIR, LOGS_DIR } from '../../constants'

async function main() {
  const logger = new Logger()

  const storage = new Storage(DATA_DIR)

  const channelsContent = await storage.json('channels.json')
  const channels = new Collection(channelsContent).map(data => new Channel(data))

  const categoriesContent = await storage.json('categories.json')
  const categories = new Collection(categoriesContent).map(data => new Category(data))

  const countriesContent = await storage.json('countries.json')
  const countries = new Collection(countriesContent).map(data => new Country(data))

  const languagesContent = await storage.json('languages.json')
  const languages = new Collection(languagesContent).map(data => new Language(data))

  const regionsContent = await storage.json('regions.json')
  const regions = new Collection(regionsContent).map(data => new Region(data))

  const subdivisionsContent = await storage.json('subdivisions.json')
  const subdivisions = new Collection(subdivisionsContent).map(data => new Subdivision(data))

  const streams = await loadStreams({ channels, categories, languages })

  const generatorsLogger = new Logger({
    stream: await new Storage(LOGS_DIR).createStream(`generators.log`)
  })

  logger.info('generating categories/...')
  await new CategoriesGenerator({ categories, streams, logger: generatorsLogger }).generate()

  logger.info('generating countries/...')
  await new CountriesGenerator({
    countries,
    streams,
    regions,
    subdivisions,
    logger: generatorsLogger
  }).generate()

  logger.info('generating languages/...')
  await new LanguagesGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating regions/...')
  await new RegionsGenerator({
    streams,
    regions,
    subdivisions,
    logger: generatorsLogger
  }).generate()

  logger.info('generating index.m3u...')
  await new IndexGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.nsfw.m3u...')
  await new IndexNsfwGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.category.m3u...')
  await new IndexCategoryGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.country.m3u...')
  await new IndexCountryGenerator({
    streams,
    countries,
    regions,
    subdivisions,
    logger: generatorsLogger
  }).generate()

  logger.info('generating index.language.m3u...')
  await new IndexLanguageGenerator({ streams, logger: generatorsLogger }).generate()

  logger.info('generating index.region.m3u...')
  await new IndexRegionGenerator({ streams, regions, logger: generatorsLogger }).generate()
}

main()

async function loadStreams({
  channels,
  categories,
  languages
}: {
  channels: Collection
  categories: Collection
  languages: Collection
}) {
  const groupedChannels = channels.keyBy(channel => channel.id)
  const groupedCategories = categories.keyBy(category => category.id)
  const groupedLanguages = languages.keyBy(language => language.code)

  const db = new Database(DB_DIR)
  const dbStreams = await db.load('streams.db')
  const docs = await dbStreams.find({})
  const streams = new Collection(docs as any[])
    .map((data: any) => new Stream(data))
    .orderBy([(stream: Stream) => stream.channel, (stream: Stream) => stream.url], ['asc', 'asc'])
    .uniqBy((stream: Stream) => stream.channel || _.uniqueId())
    .map((stream: Stream) => {
      const channel: Channel | undefined = groupedChannels.get(stream.channel)

      if (channel) {
        const channelCategories = channel.categories
          .map((id: string) => groupedCategories.get(id))
          .filter(Boolean)
        const channelLanguages = channel.languages
          .map((id: string) => groupedLanguages.get(id))
          .filter(Boolean)

        stream.categories = channelCategories
        stream.languages = channelLanguages
        stream.broadcastArea = channel.broadcastArea
        stream.isNSFW = channel.isNSFW
        if (channel.logo) stream.logo = channel.logo
      } else {
        const file = new File(stream.filepath)
        const [_, countryCode] = file.getFilename().match(/^([a-z]{2})(_|$)/) || [null, null]
        const defaultBroadcastArea = countryCode ? [`c/${countryCode.toUpperCase()}`] : []

        stream.broadcastArea = new Collection(defaultBroadcastArea)
      }

      return stream
    })

  return streams
}
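The fallback branch in `loadStreams()` above derives a default broadcast area from the playlist filename whenever a stream has no channel in the database. A tiny TypeScript sketch of that regex in isolation (the file names passed in are examples, and it assumes the filename is given without its `.m3u` extension, as `getFilename()` appears to do):

```ts
// Mirrors the filename fallback above: a playlist named "us.m3u" or
// "us_sports.m3u" maps to the broadcast area "c/US"; anything else maps to [].
const broadcastAreaFor = (filename: string): string[] => {
  const [, code] = filename.match(/^([a-z]{2})(_|$)/) || [null, null]
  return code ? [`c/${code.toUpperCase()}`] : []
}

console.log(broadcastAreaFor('us_sports')) // [ 'c/US' ]
console.log(broadcastAreaFor('index'))     // []
```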
143  scripts/commands/playlist/update.ts  (new file)

@@ -0,0 +1,143 @@
import { DB_DIR, DATA_DIR, STREAMS_DIR } from '../../constants'
import { Database, Storage, Logger, Collection, Dictionary, IssueLoader } from '../../core'
import { Stream, Playlist, Channel } from '../../models'

let processedIssues = new Collection()
let streams: Collection
let groupedChannels: Dictionary

async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()

  logger.info('loading streams...')
  const db = new Database(DB_DIR)
  const docs = await db.load('streams.db')
  const dbStreams = await docs.find({})

  streams = new Collection(dbStreams as any[]).map(data => new Stream(data))

  const storage = new Storage(DATA_DIR)
  const channelsContent = await storage.json('channels.json')
  groupedChannels = new Collection(channelsContent)
    .map(data => new Channel(data))
    .keyBy((channel: Channel) => channel.id)

  logger.info('removing broken streams...')
  await removeStreams(loader)

  logger.info('edit stream description...')
  await editStreams(loader)

  logger.info('add new streams...')
  await addStreams(loader)

  logger.info('normalizing links...')
  streams = streams.map(stream => {
    stream.normalizeURL()
    return stream
  })

  logger.info('sorting links...')
  streams = streams.orderBy(
    [
      (stream: Stream) => stream.name,
      (stream: Stream) => parseInt(stream.quality.replace('p', '')),
      (stream: Stream) => stream.label,
      (stream: Stream) => stream.url
    ],
    ['asc', 'desc', 'asc', 'asc']
  )

  logger.info('saving...')
  const streamsStorage = new Storage(STREAMS_DIR)
  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
  for (let filepath of groupedStreams.keys()) {
    const streams = groupedStreams.get(filepath) || []

    if (!streams.length) return

    const playlist = new Playlist(streams, { public: false })
    await streamsStorage.save(filepath, playlist.toString())
  }

  const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
  console.log(`OUTPUT=${output}`)
}

main()

async function removeStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:remove', 'approved'] })
  issues.forEach((data: Dictionary) => {
    if (data.missing('stream_url')) return

    const removed = streams.remove((_stream: Stream) => _stream.url === data.get('stream_url'))
    if (removed.notEmpty()) {
      processedIssues.add(data.get('issue_number'))
    }
  })
}

async function editStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:edit', 'approved'] })
  issues.forEach((data: Dictionary) => {
    if (data.missing('stream_url')) return

    let stream = streams.first(
      (_stream: Stream) => _stream.url === data.get('stream_url')
    ) as Stream

    if (!stream) return

    if (data.has('channel_id')) {
      const channel = groupedChannels.get(data.get('channel_id'))

      if (!channel) return

      stream.channel = data.get('channel_id')
      stream.filepath = `${channel.country.toLowerCase()}.m3u`
      stream.line = -1
      stream.name = channel.name
    }

    if (data.has('channel_name')) stream.name = data.get('channel_name')
    if (data.has('label')) stream.label = data.get('label')
    if (data.has('quality')) stream.quality = data.get('quality')
    if (data.has('user_agent')) stream.userAgent = data.get('user_agent')
    if (data.has('http_referrer')) stream.httpReferrer = data.get('http_referrer')
    if (data.has('channel_name')) stream.name = data.get('channel_name')

    streams.remove((_stream: Stream) => _stream.channel === stream.channel)
    streams.add(stream)

    processedIssues.add(data.get('issue_number'))
  })
}

async function addStreams(loader: IssueLoader) {
  const issues = await loader.load({ labels: ['streams:add', 'approved'] })
  issues.forEach((data: Dictionary) => {
    if (data.missing('channel_id') || data.missing('stream_url')) return
    if (streams.includes((_stream: Stream) => _stream.url === data.get('stream_url'))) return

    const channel = groupedChannels.get(data.get('channel_id'))

    if (!channel) return

    const stream = new Stream({
      channel: data.get('channel_id'),
      url: data.get('stream_url'),
      label: data.get('label'),
      quality: data.get('quality'),
      userAgent: data.get('user_agent'),
      httpReferrer: data.get('http_referrer'),
      filepath: `${channel.country.toLowerCase()}.m3u`,
      line: -1,
      name: data.get('channel_name') || channel.name
    })

    streams.add(stream)
    processedIssues.add(data.get('issue_number'))
  })
}
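One detail worth calling out in the sorting step above: quality strings such as `'1080p'` are compared numerically (the trailing `p` is stripped) and in descending order, so higher resolutions sort first within a channel. A small TypeScript sketch of that key function in isolation (the sample values are made up):

```ts
// The quality key used by the orderBy call above: strip the trailing "p"
// and compare numerically, so 2160p sorts above 1080p in descending order.
const qualityKey = (quality: string): number => parseInt(quality.replace('p', ''))

const samples = ['720p', '2160p', '1080p'] // hypothetical values
samples.sort((a, b) => qualityKey(b) - qualityKey(a))
console.log(samples) // [ '2160p', '1080p', '720p' ]
```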
@@ -1,106 +0,0 @@
const { file, logger, api, parser, id } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')
const _ = require('lodash')

program.argument('[filepath]', 'Path to file to validate').parse(process.argv)

async function main() {
  const files = program.args.length ? program.args : await file.list('streams/*.m3u')

  logger.info(`loading blocklist...`)
  await api.channels.load()
  await api.blocklist.load()

  let blocklist = await api.blocklist.all()
  blocklist = blocklist
    .map(blocked => {
      const channel = api.channels.find({ id: blocked.channel })
      if (!channel) return null
      return { ...blocked, name: channel.name }
    })
    .filter(i => i)
  logger.info(`found ${blocklist.length} records`)

  let errors = []
  let warnings = []
  for (const filepath of files) {
    if (!filepath.endsWith('.m3u')) continue

    const basename = file.basename(filepath)
    const [__, country] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]

    const buffer = {}
    const fileLog = []
    try {
      const playlist = await parser.parsePlaylist(filepath)
      for (const item of playlist.items) {
        if (item.tvg.id && !api.channels.find({ id: item.tvg.id })) {
          fileLog.push({
            type: 'warning',
            line: item.line,
            message: `"${item.tvg.id}" is not in the database`
          })
        }

        if (item.url && buffer[item.url]) {
          fileLog.push({
            type: 'warning',
            line: item.line,
            message: `"${item.url}" is already on the playlist`
          })
        } else {
          buffer[item.url] = true
        }

        const channel_id = id.generate(item.name, country)
        const found = blocklist.find(
          blocked =>
            item.tvg.id.toLowerCase() === blocked.channel.toLowerCase() ||
            channel_id.toLowerCase() === blocked.channel.toLowerCase()
        )
        if (found) {
          fileLog.push({
            type: 'error',
            line: item.line,
            message: `"${found.name}" is on the blocklist due to claims of copyright holders (${found.ref})`
          })
        }
      }
    } catch (err) {
      fileLog.push({
        type: 'error',
        line: 0,
        message: err.message.toLowerCase()
      })
    }

    if (fileLog.length) {
      logger.info(`\n${chalk.underline(filepath)}`)

      fileLog.forEach(err => {
        const position = err.line.toString().padEnd(6, ' ')
        const type = err.type.padEnd(9, ' ')
        const status = err.type === 'error' ? chalk.red(type) : chalk.yellow(type)
        logger.info(` ${chalk.gray(position)}${status}${err.message}`)
      })

      errors = errors.concat(fileLog.filter(e => e.type === 'error'))
      warnings = warnings.concat(fileLog.filter(e => e.type === 'warning'))
    }
  }

  logger.error(
    chalk.red(
      `\n${errors.length + warnings.length} problems (${errors.length} errors, ${
        warnings.length
      } warnings)`
    )
  )

  if (errors.length) {
    process.exit(1)
  }
}

main()
132
scripts/commands/playlist/validate.ts
Normal file
|
@ -0,0 +1,132 @@
|
|||
import { Logger, Storage, PlaylistParser, Collection, File, Dictionary } from '../../core'
|
||||
import { Channel, Stream, Blocked } from '../../models'
|
||||
import { program } from 'commander'
|
||||
import chalk from 'chalk'
|
||||
import { transliterate } from 'transliteration'
|
||||
import _ from 'lodash'
|
||||
import { DATA_DIR, STREAMS_DIR } from '../../constants'
|
||||
|
||||
program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
|
||||
|
||||
type LogItem = {
|
||||
type: string
|
||||
line: number
|
||||
message: string
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info(`loading blocklist...`)
|
||||
const storage = new Storage(DATA_DIR)
|
||||
const channelsContent = await storage.json('channels.json')
|
||||
const channels = new Collection(channelsContent).map(data => new Channel(data))
|
||||
const blocklistContent = await storage.json('blocklist.json')
|
||||
const blocklist = new Collection(blocklistContent).map(data => new Blocked(data))
|
||||
|
||||
logger.info(`found ${blocklist.count()} records`)
|
||||
|
||||
let errors = new Collection()
|
||||
let warnings = new Collection()
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({ storage: streamsStorage })
|
||||
const files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
|
||||
for (const filepath of files) {
|
||||
const file = new File(filepath)
|
||||
if (file.extension() !== 'm3u') continue
|
||||
|
||||
const [, countryCode] = file.basename().match(/([a-z]{2})(|_.*)\.m3u/i) || [null, '']
|
||||
|
||||
const log = new Collection()
|
||||
const buffer = new Dictionary()
|
||||
try {
|
||||
const relativeFilepath = filepath.replace(STREAMS_DIR, '')
|
||||
const playlist = await parser.parse(relativeFilepath)
|
||||
playlist.streams.forEach((stream: Stream) => {
|
||||
const channelNotInDatabase =
|
||||
stream.channel && !channels.first((channel: Channel) => channel.id === stream.channel)
|
||||
if (channelNotInDatabase) {
|
||||
log.add({
|
||||
type: 'warning',
|
||||
line: stream.line,
|
||||
message: `"${stream.channel}" is not in the database`
|
||||
})
|
||||
}
|
||||
|
||||
const alreadyOnPlaylist = stream.url && buffer.has(stream.url)
|
||||
if (alreadyOnPlaylist) {
|
||||
log.add({
|
||||
type: 'warning',
|
||||
line: stream.line,
|
||||
message: `"${stream.url}" is already on the playlist`
|
||||
})
|
||||
} else {
|
||||
buffer.set(stream.url, true)
|
||||
}
|
||||
|
||||
const channelId = generateChannelId(stream.name, countryCode)
|
||||
const blocked = blocklist.first(
|
||||
blocked =>
|
||||
stream.channel.toLowerCase() === blocked.channel.toLowerCase() ||
|
||||
channelId.toLowerCase() === blocked.channel.toLowerCase()
|
||||
)
|
||||
if (blocked) {
|
||||
log.add({
|
||||
type: 'error',
|
||||
line: stream.line,
|
||||
message: `"${stream.name}" is on the blocklist due to claims of copyright holders (${blocked.ref})`
|
||||
})
|
||||
}
|
||||
})
|
||||
} catch (error) {
|
||||
log.add({
|
||||
type: 'error',
|
||||
line: 0,
|
||||
message: error.message.toLowerCase()
|
||||
})
|
||||
}
|
||||
|
||||
if (log.notEmpty()) {
|
||||
logger.info(`\n${chalk.underline(filepath)}`)
|
||||
|
||||
log.forEach((logItem: LogItem) => {
|
||||
const position = logItem.line.toString().padEnd(6, ' ')
|
||||
const type = logItem.type.padEnd(9, ' ')
|
||||
const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)
|
||||
|
||||
logger.info(` ${chalk.gray(position)}${status}${logItem.message}`)
|
||||
})
|
||||
|
||||
errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
|
||||
warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
|
||||
}
|
||||
}
|
||||
|
||||
logger.error(
|
||||
chalk.red(
|
||||
`\n${
|
||||
errors.count() + warnings.count()
|
||||
} problems (${errors.count()} errors, ${warnings.count()} warnings)`
|
||||
)
|
||||
)
|
||||
|
||||
if (errors.count()) {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
function generateChannelId(name: string, code: string) {
|
||||
if (!name || !code) return ''
|
||||
|
||||
name = name.replace(/ *\([^)]*\) */g, '')
|
||||
name = name.replace(/ *\[[^)]*\] */g, '')
|
||||
name = name.replace(/\+/gi, 'Plus')
|
||||
name = name.replace(/[^a-z\d]+/gi, '')
|
||||
name = name.trim()
|
||||
name = transliterate(name)
|
||||
code = code.toLowerCase()
|
||||
|
||||
return `${name}.${code}`
|
||||
}
|
|
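A quick sanity check of the generateChannelId() helper above, traced by hand. The sample channel names are made up, and the function is assumed to be in scope since validate.ts does not export it:

generateChannelId('BBC One (HD)', 'UK')       // => 'BBCOne.uk'
generateChannelId('Das Erste [Backup]', 'de') // => 'DasErste.de'
generateChannelId('', 'us')                   // => '' (an empty name or code short-circuits)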
@ -1,143 +0,0 @@
|
|||
const { file, markdown, parser, logger, api } = require('../../core')
|
||||
const { create: createTable } = require('../../core/table')
|
||||
const { program } = require('commander')
|
||||
|
||||
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/tmp/logs/generators'
|
||||
|
||||
const options = program
|
||||
.option('-c, --config <config>', 'Set path to config file', '.readme/config.json')
|
||||
.parse(process.argv)
|
||||
.opts()
|
||||
|
||||
async function main() {
|
||||
await createCategoryTable()
|
||||
await createCountryTable()
|
||||
await createLanguageTable()
|
||||
await createRegionTable()
|
||||
await updateReadme()
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function createCategoryTable() {
|
||||
logger.info('creating category table...')
|
||||
const rows = []
|
||||
await api.categories.load()
|
||||
const items = await parser.parseLogs(`${LOGS_DIR}/categories.log`)
|
||||
for (const item of items) {
|
||||
const id = file.getFilename(item.filepath)
|
||||
const category = await api.categories.find({ id })
|
||||
rows.push({
|
||||
name: category ? category.name : 'Undefined',
|
||||
channels: item.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
|
||||
})
|
||||
}
|
||||
|
||||
const table = createTable(rows, [
|
||||
{ name: 'Category' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', nowrap: true }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_categories.md', table)
|
||||
}
|
||||
|
||||
async function createCountryTable() {
|
||||
logger.info('creating country table...')
|
||||
const rows = []
|
||||
await api.countries.load()
|
||||
await api.subdivisions.load()
|
||||
const items = await parser.parseLogs(`${LOGS_DIR}/countries.log`)
|
||||
for (const item of items) {
|
||||
const code = file.getFilename(item.filepath)
|
||||
const country = await api.countries.find({ code: code.toUpperCase() })
|
||||
if (country) {
|
||||
rows.push({
|
||||
name: `${country.flag} ${country.name}`,
|
||||
channels: item.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
|
||||
})
|
||||
} else if (code === 'int') {
|
||||
rows.push({
|
||||
name: `🌍 International`,
|
||||
channels: item.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
|
||||
})
|
||||
} else {
|
||||
const subdivision = await api.subdivisions.find({ code: code.toUpperCase() })
|
||||
if (subdivision) {
|
||||
rows.push({
|
||||
name: ` ${subdivision.name}`,
|
||||
channels: item.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const table = createTable(rows, [
|
||||
{ name: 'Country' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', nowrap: true }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_countries.md', table)
|
||||
}
|
||||
|
||||
async function createLanguageTable() {
|
||||
logger.info('creating language table...')
|
||||
const rows = []
|
||||
await api.languages.load()
|
||||
const items = await parser.parseLogs(`${LOGS_DIR}/languages.log`)
|
||||
for (const item of items) {
|
||||
const code = file.getFilename(item.filepath)
|
||||
const language = await api.languages.find({ code })
|
||||
rows.push({
|
||||
name: language ? language.name : 'Undefined',
|
||||
channels: item.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
|
||||
})
|
||||
}
|
||||
|
||||
const table = createTable(rows, [
|
||||
{ name: 'Language', align: 'left' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', align: 'left', nowrap: true }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_languages.md', table)
|
||||
}
|
||||
|
||||
async function createRegionTable() {
|
||||
logger.info('creating region table...')
|
||||
const rows = []
|
||||
await api.regions.load()
|
||||
const items = await parser.parseLogs(`${LOGS_DIR}/regions.log`)
|
||||
for (const item of items) {
|
||||
const code = file.getFilename(item.filepath)
|
||||
const region = await api.regions.find({ code: code.toUpperCase() })
|
||||
if (region) {
|
||||
rows.push({
|
||||
name: region.name,
|
||||
channels: item.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const table = createTable(rows, [
|
||||
{ name: 'Region', align: 'left' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', align: 'left', nowrap: true }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_regions.md', table)
|
||||
}
|
||||
|
||||
async function updateReadme() {
|
||||
logger.info('updating readme.md...')
|
||||
const config = require(file.resolve(options.config))
|
||||
await file.createDir(file.dirname(config.build))
|
||||
await markdown.compile(options.config)
|
||||
}
|
24
scripts/commands/readme/update.ts
Normal file
|
@ -0,0 +1,24 @@
|
|||
import { CategoryTable, CountryTable, LanguageTable, RegionTable } from '../../tables'
|
||||
import { Logger, Markdown } from '../../core'
|
||||
import { README_DIR } from '../../constants'
|
||||
import path from 'path'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('creating category table...')
|
||||
await new CategoryTable().make()
|
||||
logger.info('creating country table...')
|
||||
await new CountryTable().make()
|
||||
logger.info('creating language table...')
|
||||
await new LanguageTable().make()
|
||||
logger.info('creating region table...')
|
||||
await new RegionTable().make()
|
||||
|
||||
logger.info('updating readme.md...')
|
||||
const configPath = path.join(README_DIR, 'config.json')
|
||||
const readme = new Markdown(configPath)
|
||||
readme.compile()
|
||||
}
|
||||
|
||||
main()
|
|
@ -1,106 +0,0 @@
|
|||
const { api } = require('../../core')
|
||||
const { Octokit } = require('@octokit/core')
|
||||
const { paginateRest } = require('@octokit/plugin-paginate-rest')
|
||||
const CustomOctokit = Octokit.plugin(paginateRest)
|
||||
const _ = require('lodash')
|
||||
|
||||
const octokit = new CustomOctokit()
|
||||
|
||||
const DATA_DIR = process.env.DATA_DIR || './tmp/data'
|
||||
const OWNER = 'iptv-org'
|
||||
const REPO = 'iptv'
|
||||
|
||||
async function main() {
|
||||
try {
|
||||
await api.channels.load()
|
||||
let channels = await api.channels.all()
|
||||
channels = _.keyBy(channels, 'id')
|
||||
|
||||
await api.blocklist.load()
|
||||
let blocklist = await api.blocklist.all()
|
||||
blocklist = _.keyBy(blocklist, 'channel')
|
||||
|
||||
await api.streams.load()
|
||||
let streams = await api.streams.all()
|
||||
streams = _.keyBy(streams, 'channel')
|
||||
|
||||
const channelRequests = await loadChannelRequests()
|
||||
const buffer = {}
|
||||
const report = channelRequests.map(r => {
|
||||
let result = {
|
||||
issueNumber: r.issue.number,
|
||||
channelId: r.channel.id || undefined,
|
||||
status: undefined
|
||||
}
|
||||
|
||||
if (!r.channel || !r.channel.id) result.status = 'error'
|
||||
else if (blocklist[r.channel.id]) result.status = 'blocked'
|
||||
else if (!channels[r.channel.id]) result.status = 'invalid_id'
|
||||
else if (streams[r.channel.id]) result.status = 'fulfilled'
|
||||
else if (buffer[r.channel.id] && !r.channel.url) result.status = 'duplicate'
|
||||
else result.status = 'pending'
|
||||
|
||||
buffer[r.channel.id] = true
|
||||
|
||||
return result
|
||||
})
|
||||
console.table(report)
|
||||
} catch (err) {
|
||||
console.log(err.message)
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function loadChannelRequests() {
|
||||
const issues = await fetchIssues('channel request')
|
||||
|
||||
return issues.map(parseIssue)
|
||||
}
|
||||
|
||||
async function fetchIssues(labels) {
|
||||
const issues = await octokit.paginate('GET /repos/{owner}/{repo}/issues', {
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
per_page: 100,
|
||||
labels,
|
||||
direction: 'asc',
|
||||
headers: {
|
||||
'X-GitHub-Api-Version': '2022-11-28'
|
||||
}
|
||||
})
|
||||
|
||||
return issues
|
||||
}
|
||||
|
||||
function parseIssue(issue) {
|
||||
const buffer = {}
|
||||
const channel = {}
|
||||
const fields = {
|
||||
'Channel ID (required)': 'id',
|
||||
'Channel ID': 'id',
|
||||
'Stream URL (optional)': 'url',
|
||||
'Stream URL': 'url',
|
||||
'Notes (optional)': 'notes',
|
||||
Notes: 'notes'
|
||||
}
|
||||
|
||||
const matches = issue.body.match(/### ([^\r\n]+)\s+([^\r\n]+)/g)
|
||||
|
||||
if (!matches) return { issue, channel: null }
|
||||
|
||||
matches.forEach(item => {
|
||||
const [, fieldLabel, value] = item.match(/### ([^\r\n]+)\s+([^\r\n]+)/)
|
||||
const field = fields[fieldLabel]
|
||||
|
||||
if (!field) return
|
||||
|
||||
buffer[field] = value === '_No response_' ? undefined : value.trim()
|
||||
})
|
||||
|
||||
for (let field in buffer) {
|
||||
channel[field] = buffer[field]
|
||||
}
|
||||
|
||||
return { issue, channel }
|
||||
}
|
53
scripts/commands/report/create.ts
Normal file
|
@ -0,0 +1,53 @@
|
|||
import { DATA_DIR } from '../../constants'
|
||||
import { Collection, Dictionary, IssueLoader, Storage } from '../../core'
|
||||
import { Blocked, Channel, Stream } from '../../models'
|
||||
|
||||
async function main() {
|
||||
const loader = new IssueLoader()
|
||||
|
||||
const storage = new Storage(DATA_DIR)
|
||||
|
||||
const channelsContent = await storage.json('channels.json')
|
||||
const groupedChannels = new Collection(channelsContent)
|
||||
.map(data => new Channel(data))
|
||||
.groupBy((channel: Channel) => channel.id)
|
||||
|
||||
const streamsContent = await storage.json('streams.json')
|
||||
const groupedStreams = new Collection(streamsContent)
|
||||
.map(data => new Stream(data))
|
||||
.groupBy((stream: Stream) => stream.url)
|
||||
|
||||
const blocklistContent = await storage.json('blocklist.json')
|
||||
const groupedBlocklist = new Collection(blocklistContent)
|
||||
.map(data => new Blocked(data))
|
||||
.groupBy((blocked: Blocked) => blocked.channel)
|
||||
|
||||
const issues = await loader.load({ labels: ['streams:add'] })
|
||||
|
||||
const buffer = new Dictionary()
|
||||
const report = issues.map(data => {
|
||||
const channelId = data.get('channel_id') || undefined
|
||||
const streamUrl = data.get('stream_url') || undefined
|
||||
|
||||
const result = new Dictionary({
|
||||
issueNumber: data.get('issue_number'),
|
||||
channelId,
|
||||
status: undefined
|
||||
})
|
||||
|
||||
if (!channelId || !streamUrl) result.set('status', 'error')
|
||||
else if (groupedBlocklist.has(channelId)) result.set('status', 'blocked')
|
||||
else if (groupedChannels.missing(channelId)) result.set('status', 'invalid_id')
|
||||
else if (groupedStreams.has(streamUrl)) result.set('status', 'fulfilled')
|
||||
else if (buffer.has(streamUrl)) result.set('status', 'duplicate')
|
||||
else result.set('status', 'pending')
|
||||
|
||||
buffer.set(streamUrl, true)
|
||||
|
||||
return result.data()
|
||||
})
|
||||
|
||||
console.table(report.all())
|
||||
}
|
||||
|
||||
main()
|
11
scripts/constants.ts
Normal file
|
@ -0,0 +1,11 @@
|
|||
export const ROOT_DIR = process.env.ROOT_DIR || './'
|
||||
export const STREAMS_DIR = process.env.STREAMS_DIR || './streams'
|
||||
export const PUBLIC_DIR = process.env.PUBLIC_DIR || './.gh-pages'
|
||||
export const README_DIR = process.env.README_DIR || './.readme'
|
||||
export const API_DIR = process.env.API_DIR || './.api'
|
||||
export const DATA_DIR = process.env.DATA_DIR || './temp/data'
|
||||
export const LOGS_DIR = process.env.LOGS_DIR || './temp/logs'
|
||||
export const DB_DIR = process.env.DB_DIR || './temp/database'
|
||||
export const TESTING = process.env.NODE_ENV === 'test'
|
||||
export const OWNER = 'iptv-org'
|
||||
export const REPO = 'iptv'
|
|
@ -1,41 +0,0 @@
|
|||
const _ = require('lodash')
|
||||
const file = require('./file')
|
||||
|
||||
const DATA_DIR = process.env.DATA_DIR || './scripts/tmp/data'
|
||||
|
||||
class API {
|
||||
constructor(filepath) {
|
||||
this.filepath = file.resolve(filepath)
|
||||
}
|
||||
|
||||
async load() {
|
||||
const data = await file.read(this.filepath)
|
||||
this.collection = JSON.parse(data)
|
||||
}
|
||||
|
||||
find(query) {
|
||||
return _.find(this.collection, query)
|
||||
}
|
||||
|
||||
filter(query) {
|
||||
return _.filter(this.collection, query)
|
||||
}
|
||||
|
||||
all() {
|
||||
return this.collection
|
||||
}
|
||||
}
|
||||
|
||||
const api = {}
|
||||
|
||||
api.channels = new API(`${DATA_DIR}/channels.json`)
|
||||
api.streams = new API(`${DATA_DIR}/streams.json`)
|
||||
api.countries = new API(`${DATA_DIR}/countries.json`)
|
||||
api.guides = new API(`${DATA_DIR}/guides.json`)
|
||||
api.categories = new API(`${DATA_DIR}/categories.json`)
|
||||
api.languages = new API(`${DATA_DIR}/languages.json`)
|
||||
api.regions = new API(`${DATA_DIR}/regions.json`)
|
||||
api.blocklist = new API(`${DATA_DIR}/blocklist.json`)
|
||||
api.subdivisions = new API(`${DATA_DIR}/subdivisions.json`)
|
||||
|
||||
module.exports = api
|
|
@ -1,19 +0,0 @@
|
|||
const IPTVChecker = require('iptv-checker')
|
||||
|
||||
const checker = {}
|
||||
|
||||
checker.check = async function (item, config) {
|
||||
const ic = new IPTVChecker(config)
|
||||
const result = await ic.checkStream({ url: item.url, http: item.http })
|
||||
|
||||
return {
|
||||
_id: item._id,
|
||||
url: item.url,
|
||||
http: item.http,
|
||||
error: !result.status.ok ? { code: result.status.code, message: result.status.message } : null,
|
||||
streams: result.status.ok ? result.status.metadata.streams : [],
|
||||
requests: result.status.ok ? result.status.metadata.requests : []
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = checker
|
175
scripts/core/collection.ts
Normal file
|
@ -0,0 +1,175 @@
|
|||
import _ from 'lodash'
|
||||
import { orderBy, Order } from 'natural-orderby'
|
||||
import { Dictionary } from './'
|
||||
|
||||
type Iteratee = (value: any, value2?: any) => void
|
||||
|
||||
export class Collection {
|
||||
_items: any[]
|
||||
|
||||
constructor(items?: any[]) {
|
||||
this._items = Array.isArray(items) ? items : []
|
||||
}
|
||||
|
||||
first(predicate?: Iteratee) {
|
||||
if (predicate) {
|
||||
return this._items.find(predicate)
|
||||
}
|
||||
|
||||
return this._items[0]
|
||||
}
|
||||
|
||||
last(predicate?: Iteratee) {
|
||||
if (predicate) {
|
||||
return _.findLast(this._items, predicate)
|
||||
}
|
||||
|
||||
return this._items[this._items.length - 1]
|
||||
}
|
||||
|
||||
find(iteratee: Iteratee): Collection {
|
||||
const found = this._items.filter(iteratee)
|
||||
|
||||
return new Collection(found)
|
||||
}
|
||||
|
||||
add(data: any) {
|
||||
this._items.push(data)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
intersects(collection: Collection): boolean {
|
||||
return _.intersection(this._items, collection.all()).length > 0
|
||||
}
|
||||
|
||||
count() {
|
||||
return this._items.length
|
||||
}
|
||||
|
||||
join(separator: string) {
|
||||
return this._items.join(separator)
|
||||
}
|
||||
|
||||
indexOf(value: string) {
|
||||
return this._items.indexOf(value)
|
||||
}
|
||||
|
||||
push(data: any) {
|
||||
this.add(data)
|
||||
}
|
||||
|
||||
uniq() {
|
||||
const items = _.uniq(this._items)
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
reduce(iteratee: Iteratee, accumulator: any) {
|
||||
const items = _.reduce(this._items, iteratee, accumulator)
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
filter(iteratee: Iteratee) {
|
||||
const items = _.filter(this._items, iteratee)
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
forEach(iteratee: Iteratee) {
|
||||
for (let item of this._items) {
|
||||
iteratee(item)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
remove(iteratee: Iteratee): Collection {
|
||||
const removed = _.remove(this._items, iteratee)
|
||||
|
||||
return new Collection(removed)
|
||||
}
|
||||
|
||||
concat(collection: Collection) {
|
||||
const items = this._items.concat(collection._items)
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
isEmpty(): boolean {
|
||||
return this._items.length === 0
|
||||
}
|
||||
|
||||
notEmpty(): boolean {
|
||||
return this._items.length > 0
|
||||
}
|
||||
|
||||
sort() {
|
||||
const items = this._items.sort()
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
orderBy(iteratees: Iteratee | Iteratee[], orders?: Order | Order[]) {
|
||||
const items = orderBy(this._items, iteratees, orders)
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
keyBy(iteratee: Iteratee) {
|
||||
const items = _.keyBy(this._items, iteratee)
|
||||
|
||||
return new Dictionary(items)
|
||||
}
|
||||
|
||||
empty() {
|
||||
return this._items.length === 0
|
||||
}
|
||||
|
||||
includes(value: any) {
|
||||
if (typeof value === 'function') {
|
||||
const found = this._items.find(value)
|
||||
|
||||
return !!found
|
||||
}
|
||||
|
||||
return this._items.includes(value)
|
||||
}
|
||||
|
||||
missing(value: any) {
|
||||
if (typeof value === 'function') {
|
||||
const found = this._items.find(value)
|
||||
|
||||
return !found
|
||||
}
|
||||
|
||||
return !this._items.includes(value)
|
||||
}
|
||||
|
||||
uniqBy(iteratee: Iteratee) {
|
||||
const items = _.uniqBy(this._items, iteratee)
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
groupBy(iteratee: Iteratee) {
|
||||
const object = _.groupBy(this._items, iteratee)
|
||||
|
||||
return new Dictionary(object)
|
||||
}
|
||||
|
||||
map(iteratee: Iteratee) {
|
||||
const items = this._items.map(iteratee)
|
||||
|
||||
return new Collection(items)
|
||||
}
|
||||
|
||||
all() {
|
||||
return this._items
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return JSON.stringify(this._items)
|
||||
}
|
||||
}
|
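A minimal usage sketch of the new Collection wrapper. The item data is hypothetical, and the import path mirrors the one used by the command files above (i.e. a file sitting under scripts/commands/):

import { Collection } from '../../core'

// Hypothetical items; Collection wraps any array.
const streams = new Collection([
  { channel: 'BBCOne.uk', quality: '1080p' },
  { channel: 'BBCOne.uk', quality: '720p' },
  { channel: 'CNN.us', quality: '' }
])

const grouped = streams.groupBy(item => item.channel)       // Dictionary keyed by channel
const hd = streams.filter(item => item.quality === '1080p') // new Collection with one item
console.log(grouped.keys()) // ['BBCOne.uk', 'CNN.us']
console.log(hd.count())     // 1

Note that filter, map and uniqBy return new Collection instances, so calls can be chained.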
22
scripts/core/database.ts
Normal file
|
@ -0,0 +1,22 @@
|
|||
import Datastore from '@seald-io/nedb'
|
||||
import * as path from 'path'
|
||||
|
||||
export class Database {
|
||||
rootDir: string
|
||||
|
||||
constructor(rootDir: string) {
|
||||
this.rootDir = rootDir
|
||||
}
|
||||
|
||||
async load(filepath: string) {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
|
||||
return new Datastore({
|
||||
filename: path.resolve(absFilepath),
|
||||
autoload: true,
|
||||
onload: (error: Error): any => {
|
||||
if (error) console.error(error.message)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
|
||||
dayjs.extend(utc)
|
||||
|
||||
const date = {}
|
||||
|
||||
date.utc = d => {
|
||||
return dayjs.utc(d)
|
||||
}
|
||||
|
||||
module.exports = date
|
|
@ -1,82 +0,0 @@
|
|||
const nedb = require('nedb-promises')
|
||||
const fs = require('fs-extra')
|
||||
const file = require('./file')
|
||||
|
||||
const DB_DIR = process.env.DB_DIR || './scripts/tmp/database'
|
||||
|
||||
fs.ensureDirSync(DB_DIR)
|
||||
|
||||
class Database {
|
||||
constructor(filepath) {
|
||||
this.filepath = filepath
|
||||
}
|
||||
|
||||
load() {
|
||||
this.db = nedb.create({
|
||||
filename: file.resolve(this.filepath),
|
||||
autoload: true,
|
||||
onload: err => {
|
||||
if (err) console.error(err)
|
||||
},
|
||||
compareStrings: (a, b) => {
|
||||
a = a.replace(/\s/g, '_')
|
||||
b = b.replace(/\s/g, '_')
|
||||
|
||||
return a.localeCompare(b, undefined, {
|
||||
sensitivity: 'accent',
|
||||
numeric: true
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
removeIndex(field) {
|
||||
return this.db.removeIndex(field)
|
||||
}
|
||||
|
||||
addIndex(options) {
|
||||
return this.db.ensureIndex(options)
|
||||
}
|
||||
|
||||
compact() {
|
||||
return this.db.persistence.compactDatafile()
|
||||
}
|
||||
|
||||
stopAutocompact() {
|
||||
return this.db.persistence.stopAutocompaction()
|
||||
}
|
||||
|
||||
reset() {
|
||||
return file.clear(this.filepath)
|
||||
}
|
||||
|
||||
count(query) {
|
||||
return this.db.count(query)
|
||||
}
|
||||
|
||||
insert(doc) {
|
||||
return this.db.insert(doc)
|
||||
}
|
||||
|
||||
update(query, update) {
|
||||
return this.db.update(query, update)
|
||||
}
|
||||
|
||||
find(query) {
|
||||
return this.db.find(query)
|
||||
}
|
||||
|
||||
all() {
|
||||
return this.find({})
|
||||
}
|
||||
|
||||
remove(query, options) {
|
||||
return this.db.remove(query, options)
|
||||
}
|
||||
}
|
||||
|
||||
const db = {}
|
||||
|
||||
db.streams = new Database(`${DB_DIR}/streams.db`)
|
||||
|
||||
module.exports = db
|
31
scripts/core/dictionary.ts
Normal file
|
@ -0,0 +1,31 @@
|
|||
export class Dictionary {
|
||||
dict: any
|
||||
|
||||
constructor(dict?: any) {
|
||||
this.dict = dict || {}
|
||||
}
|
||||
|
||||
set(key: string, value: any) {
|
||||
this.dict[key] = value
|
||||
}
|
||||
|
||||
has(key: string): boolean {
|
||||
return !!this.dict[key]
|
||||
}
|
||||
|
||||
missing(key: string): boolean {
|
||||
return !this.dict[key]
|
||||
}
|
||||
|
||||
get(key: string): any {
|
||||
return this.dict[key] ? this.dict[key] : undefined
|
||||
}
|
||||
|
||||
keys(): string[] {
|
||||
return Object.keys(this.dict)
|
||||
}
|
||||
|
||||
data() {
|
||||
return this.dict
|
||||
}
|
||||
}
|
|
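For illustration, this is how the validation command above uses a Dictionary as a de-duplication buffer (the URLs are hypothetical):

import { Dictionary } from '../../core'

const buffer = new Dictionary()
buffer.set('https://example.com/stream.m3u8', true)

buffer.has('https://example.com/stream.m3u8')    // true
buffer.missing('https://example.com/other.m3u8') // true
buffer.get('https://example.com/other.m3u8')     // undefined

Since has() is just a truthiness check, storing true as the marker value (as validate.ts does) is what makes this work; a falsy value would read as missing.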
@ -1,70 +0,0 @@
|
|||
const { create: createPlaylist } = require('./playlist')
|
||||
const store = require('./store')
|
||||
const path = require('path')
|
||||
const glob = require('glob')
|
||||
const fs = require('fs-extra')
|
||||
const _ = require('lodash')
|
||||
|
||||
const file = {}
|
||||
|
||||
file.list = function (pattern) {
|
||||
return new Promise(resolve => {
|
||||
glob(pattern, function (err, files) {
|
||||
resolve(files)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
file.getFilename = function (filepath) {
|
||||
return path.parse(filepath).name
|
||||
}
|
||||
|
||||
file.createDir = async function (dir) {
|
||||
if (await file.exists(dir)) return
|
||||
|
||||
return fs.mkdir(dir, { recursive: true }).catch(console.error)
|
||||
}
|
||||
|
||||
file.exists = function (filepath) {
|
||||
return fs.exists(path.resolve(filepath))
|
||||
}
|
||||
|
||||
file.read = function (filepath) {
|
||||
return fs.readFile(path.resolve(filepath), { encoding: 'utf8' }).catch(console.error)
|
||||
}
|
||||
|
||||
file.append = function (filepath, data) {
|
||||
return fs.appendFile(path.resolve(filepath), data).catch(console.error)
|
||||
}
|
||||
|
||||
file.create = function (filepath, data = '') {
|
||||
filepath = path.resolve(filepath)
|
||||
const dir = path.dirname(filepath)
|
||||
|
||||
return file
|
||||
.createDir(dir)
|
||||
.then(() => fs.writeFile(filepath, data, { encoding: 'utf8', flag: 'w' }))
|
||||
.catch(console.error)
|
||||
}
|
||||
|
||||
file.write = function (filepath, data = '') {
|
||||
return fs.writeFile(path.resolve(filepath), data).catch(console.error)
|
||||
}
|
||||
|
||||
file.clear = function (filepath) {
|
||||
return file.write(filepath, '')
|
||||
}
|
||||
|
||||
file.resolve = function (filepath) {
|
||||
return path.resolve(filepath)
|
||||
}
|
||||
|
||||
file.dirname = function (filepath) {
|
||||
return path.dirname(filepath)
|
||||
}
|
||||
|
||||
file.basename = function (filepath) {
|
||||
return path.basename(filepath)
|
||||
}
|
||||
|
||||
module.exports = file
|
31
scripts/core/file.ts
Normal file
|
@ -0,0 +1,31 @@
|
|||
import * as path from 'path'
|
||||
|
||||
export class File {
|
||||
filepath: string
|
||||
content: string
|
||||
|
||||
constructor(filepath: string, content?: string) {
|
||||
this.filepath = filepath
|
||||
this.content = content || ''
|
||||
}
|
||||
|
||||
getFilename() {
|
||||
return path.parse(this.filepath).name
|
||||
}
|
||||
|
||||
dirname() {
|
||||
return path.dirname(this.filepath)
|
||||
}
|
||||
|
||||
basename() {
|
||||
return path.basename(this.filepath)
|
||||
}
|
||||
|
||||
append(data: string) {
|
||||
this.content = this.content + data
|
||||
}
|
||||
|
||||
extension() {
|
||||
return this.filepath.split('.').pop()
|
||||
}
|
||||
}
|
|
@ -1,33 +0,0 @@
|
|||
const { create: createPlaylist } = require('./playlist')
|
||||
const generators = require('../generators')
|
||||
const logger = require('./logger')
|
||||
const file = require('./file')
|
||||
|
||||
const PUBLIC_DIR = process.env.PUBLIC_DIR || '.gh-pages'
|
||||
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/tmp/logs/generators'
|
||||
|
||||
const generator = {}
|
||||
|
||||
generator.generate = async function (name, streams = []) {
|
||||
if (typeof generators[name] === 'function') {
|
||||
try {
|
||||
let output = await generators[name].bind()(streams)
|
||||
output = Array.isArray(output) ? output : [output]
|
||||
for (const type of output) {
|
||||
const playlist = createPlaylist(type.items, { public: true })
|
||||
await file.create(`${PUBLIC_DIR}/${type.filepath}`, playlist.toString())
|
||||
}
|
||||
await file.create(`${LOGS_DIR}/${name}.log`, output.map(toJSON).join('\n'))
|
||||
} catch (error) {
|
||||
logger.error(`generators/${name}.js: ${error.message}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = generator
|
||||
|
||||
function toJSON(type) {
|
||||
type.count = type.items.length
|
||||
delete type.items
|
||||
return JSON.stringify(type)
|
||||
}
|
46
scripts/core/htmlTable.ts
Normal file
|
@ -0,0 +1,46 @@
|
|||
type Column = {
|
||||
name: string
|
||||
nowrap?: boolean
|
||||
align?: string
|
||||
}
|
||||
|
||||
type DataItem = string[]
|
||||
|
||||
export class HTMLTable {
|
||||
data: DataItem[]
|
||||
columns: Column[]
|
||||
|
||||
constructor(data: DataItem[], columns: Column[]) {
|
||||
this.data = data
|
||||
this.columns = columns
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = '<table>\n'
|
||||
|
||||
output += ' <thead>\n <tr>'
|
||||
for (let column of this.columns) {
|
||||
output += `<th align="left">${column.name}</th>`
|
||||
}
|
||||
output += '</tr>\n </thead>\n'
|
||||
|
||||
output += ' <tbody>\n'
|
||||
for (let item of this.data) {
|
||||
output += ' <tr>'
|
||||
let i = 0
|
||||
for (let prop in item) {
|
||||
const column = this.columns[i]
|
||||
let nowrap = column.nowrap ? ` nowrap` : ''
|
||||
let align = column.align ? ` align="${column.align}"` : ''
|
||||
output += `<td${align}${nowrap}>${item[prop]}</td>`
|
||||
i++
|
||||
}
|
||||
output += '</tr>\n'
|
||||
}
|
||||
output += ' </tbody>\n'
|
||||
|
||||
output += '</table>'
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
|
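A small sketch of the HTMLTable output, using made-up row data shaped like the README tables the generators produce:

import { HTMLTable } from '../../core'

const table = new HTMLTable(
  [['News', '451', '<code>https://iptv-org.github.io/iptv/categories/news.m3u</code>']],
  [
    { name: 'Category' },
    { name: 'Channels', align: 'right' },
    { name: 'Playlist', nowrap: true }
  ]
)

console.log(table.toString()) // <table>…</table> markup, one <td> per column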
@ -1,19 +0,0 @@
|
|||
const { transliterate } = require('transliteration')
|
||||
|
||||
const id = {}
|
||||
|
||||
id.generate = function (name, code) {
|
||||
if (!name || !code) return null
|
||||
|
||||
name = name.replace(/ *\([^)]*\) */g, '')
|
||||
name = name.replace(/ *\[[^)]*\] */g, '')
|
||||
name = name.replace(/\+/gi, 'Plus')
|
||||
name = name.replace(/[^a-z\d]+/gi, '')
|
||||
name = name.trim()
|
||||
name = transliterate(name)
|
||||
code = code.toLowerCase()
|
||||
|
||||
return `${name}.${code}`
|
||||
}
|
||||
|
||||
module.exports = id
|
|
@ -1,14 +0,0 @@
|
|||
exports.db = require('./db')
|
||||
exports.logger = require('./logger')
|
||||
exports.file = require('./file')
|
||||
exports.timer = require('./timer')
|
||||
exports.parser = require('./parser')
|
||||
exports.checker = require('./checker')
|
||||
exports.generator = require('./generator')
|
||||
exports.playlist = require('./playlist')
|
||||
exports.store = require('./store')
|
||||
exports.markdown = require('./markdown')
|
||||
exports.api = require('./api')
|
||||
exports.id = require('./id')
|
||||
exports.m3u = require('./m3u')
|
||||
exports.date = require('./date')
|
14
scripts/core/index.ts
Normal file
|
@ -0,0 +1,14 @@
|
|||
export * from './database'
|
||||
export * from './logger'
|
||||
export * from './playlistParser'
|
||||
export * from './numberParser'
|
||||
export * from './logParser'
|
||||
export * from './markdown'
|
||||
export * from './file'
|
||||
export * from './collection'
|
||||
export * from './dictionary'
|
||||
export * from './storage'
|
||||
export * from './url'
|
||||
export * from './issueLoader'
|
||||
export * from './issueParser'
|
||||
export * from './htmlTable'
|
46
scripts/core/issueLoader.ts
Normal file
|
@ -0,0 +1,46 @@
|
|||
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
|
||||
import { paginateRest } from '@octokit/plugin-paginate-rest'
|
||||
import { Octokit } from '@octokit/core'
|
||||
import { Collection, IssueParser } from './'
|
||||
import { TESTING, OWNER, REPO } from '../constants'
|
||||
|
||||
const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
|
||||
const octokit = new CustomOctokit()
|
||||
|
||||
export class IssueLoader {
|
||||
async load({ labels }: { labels: string[] | string }) {
|
||||
labels = Array.isArray(labels) ? labels.join(',') : labels
|
||||
let issues: any[] = []
|
||||
if (TESTING) {
|
||||
switch (labels) {
|
||||
case 'streams:add':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_add')).default
|
||||
break
|
||||
case 'streams:add,approved':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_add_approved')).default
|
||||
break
|
||||
case 'streams:edit,approved':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_edit_approved')).default
|
||||
break
|
||||
case 'streams:remove,approved':
|
||||
issues = (await import('../../tests/__data__/input/issues/streams_remove_approved'))
|
||||
.default
|
||||
break
|
||||
}
|
||||
} else {
|
||||
issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
per_page: 100,
|
||||
labels,
|
||||
headers: {
|
||||
'X-GitHub-Api-Version': '2022-11-28'
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const parser = new IssueParser()
|
||||
|
||||
return new Collection(issues).map(parser.parse)
|
||||
}
|
||||
}
|
48
scripts/core/issueParser.ts
Normal file
|
@ -0,0 +1,48 @@
|
|||
import { Dictionary } from './'
|
||||
|
||||
export class IssueParser {
|
||||
parse(issue: any): Dictionary {
|
||||
const data = new Dictionary()
|
||||
data.set('issue_number', issue.number)
|
||||
|
||||
const idDict = new Dictionary({
|
||||
'Channel ID': 'channel_id',
|
||||
'Channel ID (required)': 'channel_id',
|
||||
'Broken Link': 'stream_url',
|
||||
'Stream URL': 'stream_url',
|
||||
'Stream URL (optional)': 'stream_url',
|
||||
'Stream URL (required)': 'stream_url',
|
||||
Label: 'label',
|
||||
Quality: 'quality',
|
||||
'Channel Name': 'channel_name',
|
||||
'HTTP User-Agent': 'user_agent',
|
||||
'HTTP Referrer': 'http_referrer',
|
||||
Reason: 'reason',
|
||||
'What happened to the stream?': 'reason',
|
||||
'Possible Replacement (optional)': 'possible_replacement',
|
||||
Notes: 'notes',
|
||||
'Notes (optional)': 'notes'
|
||||
})
|
||||
|
||||
const fields = issue.body.split('###')
|
||||
|
||||
if (!fields.length) return data
|
||||
|
||||
fields.forEach((field: string) => {
|
||||
let [_label, , _value] = field.split(/\r?\n/)
|
||||
_label = _label ? _label.trim() : ''
|
||||
_value = _value ? _value.trim() : ''
|
||||
|
||||
if (!_label || !_value) return data
|
||||
|
||||
const id: string = idDict.get(_label)
|
||||
const value: string = _value === '_No response_' || _value === 'None' ? '' : _value
|
||||
|
||||
if (!id) return
|
||||
|
||||
data.set(id, value)
|
||||
})
|
||||
|
||||
return data
|
||||
}
|
||||
}
|
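An illustrative round trip through IssueParser.parse(), using a hypothetical issue object shaped like the GitHub API response:

import { IssueParser } from '../../core'

const issue = {
  number: 14000, // hypothetical issue number
  body: '### Channel ID\n\nBBCOne.uk\n\n### Stream URL\n\nhttps://example.com/playlist.m3u8\n\n### Notes\n\n_No response_\n'
}

const data = new IssueParser().parse(issue)
data.get('issue_number') // 14000
data.get('channel_id')   // 'BBCOne.uk'
data.get('stream_url')   // 'https://example.com/playlist.m3u8'
data.has('notes')        // false ("_No response_" is normalized to an empty string)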
13
scripts/core/logParser.ts
Normal file
|
@ -0,0 +1,13 @@
|
|||
export type LogItem = {
|
||||
filepath: string
|
||||
count: number
|
||||
}
|
||||
|
||||
export class LogParser {
|
||||
parse(content: string): any[] {
|
||||
if (!content) return []
|
||||
const lines = content.split('\n')
|
||||
|
||||
return lines.map(line => (line ? JSON.parse(line) : null)).filter(l => l)
|
||||
}
|
||||
}
|
|
@ -1,13 +0,0 @@
|
|||
const { Signale } = require('signale')
|
||||
|
||||
const options = {}
|
||||
|
||||
const logger = new Signale(options)
|
||||
|
||||
logger.config({
|
||||
displayLabel: false,
|
||||
displayScope: false,
|
||||
displayBadge: false
|
||||
})
|
||||
|
||||
module.exports = logger
|
9
scripts/core/logger.ts
Normal file
|
@ -0,0 +1,9 @@
|
|||
import signale from 'signale'
|
||||
|
||||
const { Signale } = signale
|
||||
|
||||
export class Logger extends Signale {
|
||||
constructor(options?: any) {
|
||||
super(options)
|
||||
}
|
||||
}
|
|
@ -1,34 +0,0 @@
|
|||
const m3u = {}
|
||||
|
||||
m3u.create = function (links = [], header = {}) {
|
||||
let output = `#EXTM3U`
|
||||
for (const attr in header) {
|
||||
const value = header[attr]
|
||||
output += ` ${attr}="${value}"`
|
||||
}
|
||||
output += `\n`
|
||||
|
||||
for (const link of links) {
|
||||
output += `#EXTINF:-1`
|
||||
for (const name in link.attrs) {
|
||||
const value = link.attrs[name]
|
||||
if (value !== undefined) {
|
||||
output += ` ${name}="${value}"`
|
||||
}
|
||||
}
|
||||
output += `,${link.title}\n`
|
||||
|
||||
for (const name in link.vlcOpts) {
|
||||
const value = link.vlcOpts[name]
|
||||
if (value !== undefined) {
|
||||
output += `#EXTVLCOPT:${name}=${value}\n`
|
||||
}
|
||||
}
|
||||
|
||||
output += `${link.url}\n`
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
module.exports = m3u
|
|
@ -1,10 +0,0 @@
|
|||
const markdownInclude = require('markdown-include')
|
||||
const file = require('./file')
|
||||
|
||||
const markdown = {}
|
||||
|
||||
markdown.compile = function (filepath) {
|
||||
markdownInclude.compileFiles(file.resolve(filepath))
|
||||
}
|
||||
|
||||
module.exports = markdown
|
13
scripts/core/markdown.ts
Normal file
|
@ -0,0 +1,13 @@
|
|||
import markdownInclude from 'markdown-include'
|
||||
|
||||
export class Markdown {
|
||||
filepath: string
|
||||
|
||||
constructor(filepath: string) {
|
||||
this.filepath = filepath
|
||||
}
|
||||
|
||||
compile() {
|
||||
markdownInclude.compileFiles(this.filepath)
|
||||
}
|
||||
}
|
10
scripts/core/numberParser.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
export default class NumberParser {
|
||||
async parse(number: string) {
|
||||
const parsed = parseInt(number)
|
||||
if (isNaN(parsed)) {
|
||||
throw new Error('numberParser:parse() Input value is not a number')
|
||||
}
|
||||
|
||||
return parsed
|
||||
}
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
const ipp = require('iptv-playlist-parser')
|
||||
const logger = require('./logger')
|
||||
const file = require('./file')
|
||||
|
||||
const parser = {}
|
||||
|
||||
parser.parsePlaylist = async function (filepath) {
|
||||
const content = await file.read(filepath)
|
||||
|
||||
return ipp.parse(content)
|
||||
}
|
||||
|
||||
parser.parseLogs = async function (filepath) {
|
||||
const content = await file.read(filepath)
|
||||
if (!content) return []
|
||||
const lines = content.split('\n')
|
||||
|
||||
return lines.map(line => (line ? JSON.parse(line) : null)).filter(l => l)
|
||||
}
|
||||
|
||||
parser.parseNumber = function (string) {
|
||||
const parsed = parseInt(string)
|
||||
if (isNaN(parsed)) {
|
||||
throw new Error('scripts/core/parser.js:parseNumber() Input value is not a number')
|
||||
}
|
||||
|
||||
return parsed
|
||||
}
|
||||
|
||||
module.exports = parser
|
|
@ -1,53 +0,0 @@
|
|||
const store = require('./store')
|
||||
const m3u = require('./m3u')
|
||||
const _ = require('lodash')
|
||||
|
||||
const playlist = {}
|
||||
|
||||
class Playlist {
|
||||
constructor(items = [], options = {}) {
|
||||
this.header = {}
|
||||
|
||||
this.links = []
|
||||
for (const item of items) {
|
||||
const stream = store.create(item)
|
||||
|
||||
let attrs
|
||||
if (options.public) {
|
||||
attrs = {
|
||||
'tvg-id': stream.get('tvg_id'),
|
||||
'tvg-logo': stream.get('tvg_logo'),
|
||||
'group-title': stream.get('group_title'),
|
||||
'user-agent': stream.get('user_agent') || undefined
|
||||
}
|
||||
} else {
|
||||
attrs = {
|
||||
'tvg-id': stream.get('tvg_id'),
|
||||
'user-agent': stream.get('user_agent') || undefined
|
||||
}
|
||||
}
|
||||
|
||||
const vlcOpts = {
|
||||
'http-referrer': stream.get('http_referrer') || undefined,
|
||||
'http-user-agent': stream.get('user_agent') || undefined
|
||||
}
|
||||
|
||||
this.links.push({
|
||||
url: stream.get('url'),
|
||||
title: stream.get('title'),
|
||||
attrs,
|
||||
vlcOpts
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
toString() {
|
||||
return m3u.create(this.links, this.header)
|
||||
}
|
||||
}
|
||||
|
||||
playlist.create = function (items, options) {
|
||||
return new Playlist(items, options)
|
||||
}
|
||||
|
||||
module.exports = playlist
|
45
scripts/core/playlistParser.ts
Normal file
|
@ -0,0 +1,45 @@
|
|||
import parser from 'iptv-playlist-parser'
|
||||
import { Playlist, Stream } from '../models'
|
||||
import { Collection, Storage } from './'
|
||||
|
||||
export class PlaylistParser {
|
||||
storage: Storage
|
||||
|
||||
constructor({ storage }: { storage: Storage }) {
|
||||
this.storage = storage
|
||||
}
|
||||
|
||||
async parse(filepath: string): Promise<Playlist> {
|
||||
const streams = new Collection()
|
||||
|
||||
const content = await this.storage.read(filepath)
|
||||
const parsed: parser.Playlist = parser.parse(content)
|
||||
|
||||
parsed.items.forEach((item: parser.PlaylistItem) => {
|
||||
const { name, label, quality } = parseTitle(item.name)
|
||||
const stream = new Stream({
|
||||
channel: item.tvg.id,
|
||||
name,
|
||||
label,
|
||||
quality,
|
||||
filepath,
|
||||
line: item.line,
|
||||
url: item.url,
|
||||
httpReferrer: item.http.referrer,
|
||||
userAgent: item.http['user-agent']
|
||||
})
|
||||
|
||||
streams.add(stream)
|
||||
})
|
||||
|
||||
return new Playlist(streams)
|
||||
}
|
||||
}
|
||||
|
||||
function parseTitle(title: string): { name: string; label: string; quality: string } {
|
||||
const [, label] = title.match(/ \[(.*)\]$/) || [null, '']
|
||||
const [, quality] = title.match(/ \(([0-9]+p)\)/) || [null, '']
|
||||
const name = title.replace(` (${quality})`, '').replace(` [${label}]`, '')
|
||||
|
||||
return { name, label, quality }
|
||||
}
|
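For reference, the private parseTitle() helper above splits the conventional "Name (quality) [label]" playlist titles like so (hand-traced examples, since the function is not exported):

parseTitle('BBC One (720p) [Not 24/7]')
// => { name: 'BBC One', label: 'Not 24/7', quality: '720p' }

parseTitle('CNN International')
// => { name: 'CNN International', label: '', quality: '' }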
82
scripts/core/storage.ts
Normal file
|
@ -0,0 +1,82 @@
|
|||
import { File, Collection } from './'
|
||||
import * as path from 'path'
|
||||
import fs from 'fs-extra'
|
||||
import { glob } from 'glob'
|
||||
|
||||
export class Storage {
|
||||
rootDir: string
|
||||
|
||||
constructor(rootDir?: string) {
|
||||
this.rootDir = rootDir || './'
|
||||
}
|
||||
|
||||
list(pattern: string): Promise<string[]> {
|
||||
return glob(pattern, {
|
||||
cwd: this.rootDir
|
||||
})
|
||||
}
|
||||
|
||||
async createDir(dir: string): Promise<void> {
|
||||
if (await fs.exists(dir)) return
|
||||
|
||||
await fs.mkdir(dir, { recursive: true }).catch(console.error)
|
||||
}
|
||||
|
||||
async load(filepath: string): Promise<any> {
|
||||
return this.read(filepath)
|
||||
}
|
||||
|
||||
async read(filepath: string): Promise<any> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
|
||||
return await fs.readFile(absFilepath, { encoding: 'utf8' })
|
||||
}
|
||||
|
||||
async json(filepath: string): Promise<any> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
const content = await fs.readFile(absFilepath, { encoding: 'utf8' })
|
||||
|
||||
return JSON.parse(content)
|
||||
}
|
||||
|
||||
async exists(filepath: string): Promise<boolean> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
|
||||
return await fs.exists(absFilepath)
|
||||
}
|
||||
|
||||
async write(filepath: string, data: string = ''): Promise<void> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
const dir = path.dirname(absFilepath)
|
||||
|
||||
await this.createDir(dir)
|
||||
await fs.writeFile(absFilepath, data, { encoding: 'utf8', flag: 'w' })
|
||||
}
|
||||
|
||||
async append(filepath: string, data: string = ''): Promise<void> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
|
||||
await fs.appendFile(absFilepath, data, { encoding: 'utf8', flag: 'a' })
|
||||
}
|
||||
|
||||
async clear(filepath: string): Promise<void> {
|
||||
await this.write(filepath)
|
||||
}
|
||||
|
||||
async createStream(filepath: string): Promise<NodeJS.WriteStream> {
|
||||
const absFilepath = path.join(this.rootDir, filepath)
|
||||
const dir = path.dirname(absFilepath)
|
||||
|
||||
await this.createDir(dir)
|
||||
|
||||
return fs.createWriteStream(absFilepath) as unknown as NodeJS.WriteStream
|
||||
}
|
||||
|
||||
async save(filepath: string, content: string): Promise<void> {
|
||||
await this.write(filepath, content)
|
||||
}
|
||||
|
||||
async saveFile(file: File): Promise<void> {
|
||||
await this.write(file.filepath, file.content)
|
||||
}
|
||||
}
|
|
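A hedged usage sketch of the Storage class: paths are resolved against the root directory passed to the constructor. The file name is hypothetical, and the import paths mirror the ones used by the command files above:

import { Storage } from '../../core'
import { STREAMS_DIR } from '../../constants'

async function demo() {
  const storage = new Storage(STREAMS_DIR)
  await storage.save('us_test.m3u', '#EXTM3U\n')   // writes <STREAMS_DIR>/us_test.m3u
  console.log(await storage.exists('us_test.m3u')) // true
  console.log(await storage.load('us_test.m3u'))   // '#EXTM3U\n'
}

demo()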
@ -1,56 +0,0 @@
|
|||
const _ = require('lodash')
|
||||
const logger = require('./logger')
|
||||
const setters = require('../store/setters')
|
||||
const getters = require('../store/getters')
|
||||
|
||||
module.exports = {
|
||||
create(state = {}) {
|
||||
return {
|
||||
state,
|
||||
changed: false,
|
||||
set: function (prop, value) {
|
||||
const prevState = JSON.stringify(this.state)
|
||||
|
||||
const setter = setters[prop]
|
||||
if (typeof setter === 'function') {
|
||||
try {
|
||||
this.state[prop] = setter.bind()(value)
|
||||
} catch (error) {
|
||||
logger.error(`store/setters/${prop}.js: ${error.message}`)
|
||||
}
|
||||
} else if (typeof value === 'object') {
|
||||
this.state[prop] = value[prop]
|
||||
} else {
|
||||
this.state[prop] = value
|
||||
}
|
||||
|
||||
const newState = JSON.stringify(this.state)
|
||||
if (prevState !== newState) {
|
||||
this.changed = true
|
||||
}
|
||||
|
||||
return this
|
||||
},
|
||||
get: function (prop) {
|
||||
const getter = getters[prop]
|
||||
if (typeof getter === 'function') {
|
||||
try {
|
||||
return getter.bind(this.state)()
|
||||
} catch (error) {
|
||||
logger.error(`store/getters/${prop}.js: ${error.message}`)
|
||||
}
|
||||
} else {
|
||||
return prop.split('.').reduce((o, i) => (o ? o[i] : undefined), this.state)
|
||||
}
|
||||
},
|
||||
has: function (prop) {
|
||||
const value = this.get(prop)
|
||||
|
||||
return !_.isEmpty(value)
|
||||
},
|
||||
data: function () {
|
||||
return this.state
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
const table = {}
|
||||
|
||||
table.create = function (data, cols) {
|
||||
let output = '<table>\n'
|
||||
|
||||
output += ' <thead>\n <tr>'
|
||||
for (let column of cols) {
|
||||
output += `<th align="left">${column.name}</th>`
|
||||
}
|
||||
output += '</tr>\n </thead>\n'
|
||||
|
||||
output += ' <tbody>\n'
|
||||
for (let item of data) {
|
||||
output += ' <tr>'
|
||||
let i = 0
|
||||
for (let prop in item) {
|
||||
const column = cols[i]
|
||||
let nowrap = column.nowrap ? ` nowrap` : ''
|
||||
let align = column.align ? ` align="${column.align}"` : ''
|
||||
output += `<td${align}${nowrap}>${item[prop]}</td>`
|
||||
i++
|
||||
}
|
||||
output += '</tr>\n'
|
||||
}
|
||||
output += ' </tbody>\n'
|
||||
|
||||
output += '</table>'
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
module.exports = table
|
|
@ -1,29 +0,0 @@
|
|||
const { performance } = require('perf_hooks')
|
||||
const dayjs = require('dayjs')
|
||||
const duration = require('dayjs/plugin/duration')
|
||||
const relativeTime = require('dayjs/plugin/relativeTime')
|
||||
|
||||
dayjs.extend(relativeTime)
|
||||
dayjs.extend(duration)
|
||||
|
||||
const timer = {}
|
||||
|
||||
let t0 = 0
|
||||
|
||||
timer.start = function () {
|
||||
t0 = performance.now()
|
||||
}
|
||||
|
||||
timer.format = function (f) {
|
||||
let t1 = performance.now()
|
||||
|
||||
return dayjs.duration(t1 - t0).format(f)
|
||||
}
|
||||
|
||||
timer.humanize = function (suffix = true) {
|
||||
let t1 = performance.now()
|
||||
|
||||
return dayjs.duration(t1 - t0).humanize(suffix)
|
||||
}
|
||||
|
||||
module.exports = timer
|
|
@ -1,11 +0,0 @@
|
|||
const normalize = require('normalize-url')
|
||||
|
||||
const url = {}
|
||||
|
||||
url.normalize = function (string) {
|
||||
const normalized = normalize(string, { stripWWW: false })
|
||||
|
||||
return decodeURIComponent(normalized).replace(/\s/g, '+')
|
||||
}
|
||||
|
||||
module.exports = url
|
20
scripts/core/url.ts
Normal file
|
@ -0,0 +1,20 @@
|
|||
import normalizeUrl from 'normalize-url'
|
||||
|
||||
export class URL {
|
||||
url: string
|
||||
|
||||
constructor(url: string) {
|
||||
this.url = url
|
||||
}
|
||||
|
||||
normalize(): URL {
|
||||
const normalized = normalizeUrl(this.url, { stripWWW: false })
|
||||
this.url = decodeURIComponent(normalized).replace(/\s/g, '+')
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.url
|
||||
}
|
||||
}
|
|
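An illustrative call to the URL wrapper above; the exact output depends on the normalize-url version, but the intent is that percent-encoded spaces become "+" while the "www." prefix is kept:

import { URL } from '../../core'

new URL('https://www.example.com/Big%20Buck%20Bunny.m3u8').normalize().toString()
// => 'https://www.example.com/Big+Buck+Bunny.m3u8' (approximately; see note above)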
@ -1,18 +0,0 @@
|
|||
const api = require('../core/api')
|
||||
const _ = require('lodash')
|
||||
|
||||
module.exports = async function (streams = []) {
|
||||
await api.categories.load()
|
||||
const categories = await api.categories.all()
|
||||
|
||||
const output = []
|
||||
for (const category of categories) {
|
||||
let items = _.filter(streams, { categories: [{ id: category.id }] })
|
||||
output.push({ filepath: `categories/${category.id}.m3u`, items })
|
||||
}
|
||||
|
||||
let items = _.filter(streams, stream => !stream.categories.length)
|
||||
output.push({ filepath: 'categories/undefined.m3u', items })
|
||||
|
||||
return output
|
||||
}
|
55
scripts/generators/categoriesGenerator.ts
Normal file
|
@ -0,0 +1,55 @@
|
|||
import { Generator } from './generator'
|
||||
import { Collection, Storage, Logger } from '../core'
|
||||
import { Stream, Category, Playlist } from '../models'
|
||||
import { PUBLIC_DIR } from '../constants'
|
||||
|
||||
type CategoriesGeneratorProps = {
|
||||
streams: Collection
|
||||
categories: Collection
|
||||
logger: Logger
|
||||
}
|
||||
|
||||
export class CategoriesGenerator implements Generator {
|
||||
streams: Collection
|
||||
categories: Collection
|
||||
storage: Storage
|
||||
logger: Logger
|
||||
|
||||
constructor({ streams, categories, logger }: CategoriesGeneratorProps) {
|
||||
this.streams = streams
|
||||
this.categories = categories
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logger = logger
|
||||
}
|
||||
|
||||
async generate() {
|
||||
const streams = this.streams.orderBy([(stream: Stream) => stream.getTitle()])
|
||||
|
||||
this.categories.forEach(async (category: Category) => {
|
||||
let categoryStreams = streams
|
||||
.filter((stream: Stream) => stream.hasCategory(category))
|
||||
.map((stream: Stream) => {
|
||||
const groupTitle = stream.categories
|
||||
? stream.categories
|
||||
.map((category: Category) => category.name)
|
||||
.sort()
|
||||
.join(';')
|
||||
: ''
|
||||
stream.groupTitle = groupTitle
|
||||
|
||||
return stream
|
||||
})
|
||||
|
||||
const playlist = new Playlist(categoryStreams, { public: true })
|
||||
const filepath = `categories/${category.id}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
})
|
||||
|
||||
const undefinedStreams = streams.filter((stream: Stream) => stream.noCategories())
|
||||
const playlist = new Playlist(undefinedStreams, { public: true })
|
||||
const filepath = `categories/undefined.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
|
||||
}
|
||||
}
|
|
@@ -1,53 +0,0 @@
const api = require('../core/api')
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)

  await api.countries.load()
  const countries = await api.countries.all()
  await api.regions.load()
  let regions = await api.regions.all()
  regions = regions.filter(r => r.code !== 'INT')
  await api.subdivisions.load()
  const subdivisions = await api.subdivisions.all()

  let output = []
  for (const country of countries) {
    let countryRegionCodes = _.filter(regions, { countries: [country.code] }).map(
      r => `r/${r.code}`
    )
    const countrySubdivisions = _.filter(subdivisions, { country: country.code })
    const countryAreaCodes = countryRegionCodes.concat(countrySubdivisions.map(s => `s/${s.code}`))
    countryAreaCodes.push(`c/${country.code}`)

    let items = _.filter(streams, stream => {
      return _.intersection(stream.broadcast_area, countryAreaCodes).length
    })

    output.push({ filepath: `countries/${country.code.toLowerCase()}.m3u`, items })

    for (let subdivision of countrySubdivisions) {
      let subdivisionItems = _.filter(streams, stream => {
        return stream.broadcast_area.includes(`s/${subdivision.code}`)
      })

      if (subdivisionItems.length) {
        output.push({
          filepath: `subdivisions/${subdivision.code.toLowerCase()}.m3u`,
          items: subdivisionItems
        })
      }
    }
  }

  let intItems = _.filter(streams, stream => stream.broadcast_area.includes('r/INT'))
  output.push({
    filepath: `countries/int.m3u`,
    items: intItems
  })

  output = output.filter(f => f.items.length > 0)

  return output
}
85 scripts/generators/countriesGenerator.ts Normal file
@@ -0,0 +1,85 @@
import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { Country, Region, Subdivision, Stream, Playlist } from '../models'
import { PUBLIC_DIR } from '../constants'

type CountriesGeneratorProps = {
  streams: Collection
  regions: Collection
  subdivisions: Collection
  countries: Collection
  logger: Logger
}

export class CountriesGenerator implements Generator {
  streams: Collection
  countries: Collection
  regions: Collection
  subdivisions: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, countries, regions, subdivisions, logger }: CountriesGeneratorProps) {
    this.streams = streams
    this.countries = countries
    this.regions = regions
    this.subdivisions = subdivisions
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    let streams = this.streams
      .orderBy([stream => stream.getTitle()])
      .filter((stream: Stream) => stream.isSFW())
    let regions = this.regions.filter((region: Region) => region.code !== 'INT')

    this.countries.forEach(async (country: Country) => {
      const countrySubdivisions = this.subdivisions.filter(
        (subdivision: Subdivision) => subdivision.country === country.code
      )

      const countrySubdivisionsCodes = countrySubdivisions.map(
        (subdivision: Subdivision) => `s/${subdivision.code}`
      )

      const countryAreaCodes = regions
        .filter((region: Region) => region.countries.includes(country.code))
        .map((region: Region) => `r/${region.code}`)
        .concat(countrySubdivisionsCodes)
        .add(`c/${country.code}`)

      const countryStreams = streams.filter(stream =>
        stream.broadcastArea.intersects(countryAreaCodes)
      )

      if (countryStreams.isEmpty()) return

      const playlist = new Playlist(countryStreams, { public: true })
      const filepath = `countries/${country.code.toLowerCase()}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))

      countrySubdivisions.forEach(async (subdivision: Subdivision) => {
        const subdivisionStreams = streams.filter(stream =>
          stream.broadcastArea.includes(`s/${subdivision.code}`)
        )

        if (subdivisionStreams.isEmpty()) return

        const playlist = new Playlist(subdivisionStreams, { public: true })
        const filepath = `subdivisions/${subdivision.code.toLowerCase()}.m3u`
        await this.storage.save(filepath, playlist.toString())
        this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
      })
    })

    const internationalStreams = streams.filter(stream => stream.isInternational())
    if (internationalStreams.notEmpty()) {
      const playlist = new Playlist(internationalStreams, { public: true })
      const filepath = `countries/int.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
    }
  }
}
3 scripts/generators/generator.ts Normal file
@@ -0,0 +1,3 @@
export interface Generator {
  generate(): Promise<void>
}
@@ -1,10 +0,0 @@
exports.categories = require('./categories')
exports.countries = require('./countries')
exports.languages = require('./languages')
exports.regions = require('./regions')
exports.index_m3u = require('./index_m3u')
exports.index_nsfw_m3u = require('./index_nsfw_m3u')
exports.index_category_m3u = require('./index_category_m3u')
exports.index_country_m3u = require('./index_country_m3u')
exports.index_language_m3u = require('./index_language_m3u')
exports.index_region_m3u = require('./index_region_m3u')
10 scripts/generators/index.ts Normal file
@@ -0,0 +1,10 @@
export * from './categoriesGenerator'
export * from './countriesGenerator'
export * from './languagesGenerator'
export * from './regionsGenerator'
export * from './indexGenerator'
export * from './indexNsfwGenerator'
export * from './indexCategoryGenerator'
export * from './indexCountryGenerator'
export * from './indexLanguageGenerator'
export * from './indexRegionGenerator'
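A sketch (not from this diff) of how the exported generators might be driven, assuming the calling script supplies already-loaded Collections and a Logger; every class satisfies the shared Generator interface, so they can be awaited uniformly:

import { Collection, Logger } from '../core'
import { Generator } from './generator'
import { IndexGenerator, IndexCategoryGenerator, CategoriesGenerator } from './index'

// Hypothetical driver: the collections are assumed to be built elsewhere.
async function runGenerators(streams: Collection, categories: Collection, logger: Logger) {
  const generators: Generator[] = [
    new IndexGenerator({ streams, logger }),
    new IndexCategoryGenerator({ streams, logger }),
    new CategoriesGenerator({ streams, categories, logger })
  ]

  for (const generator of generators) {
    await generator.generate()
  }
}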
53 scripts/generators/indexCategoryGenerator.ts Normal file
@@ -0,0 +1,53 @@
import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { Stream, Playlist, Category } from '../models'
import { PUBLIC_DIR } from '../constants'

type IndexCategoryGeneratorProps = {
  streams: Collection
  logger: Logger
}

export class IndexCategoryGenerator implements Generator {
  streams: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, logger }: IndexCategoryGeneratorProps) {
    this.streams = streams
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .orderBy(stream => stream.getTitle())
      .filter(stream => stream.isSFW())

    let groupedStreams = new Collection()
    streams.forEach((stream: Stream) => {
      if (stream.noCategories()) {
        const streamClone = stream.clone()
        streamClone.groupTitle = 'Undefined'
        groupedStreams.add(streamClone)
        return
      }

      stream.categories.forEach((category: Category) => {
        const streamClone = stream.clone()
        streamClone.groupTitle = category.name
        groupedStreams.push(streamClone)
      })
    })

    groupedStreams = groupedStreams.orderBy(stream => {
      if (stream.groupTitle === 'Undefined') return 'ZZ'
      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.category.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
  }
}
104 scripts/generators/indexCountryGenerator.ts Normal file
@@ -0,0 +1,104 @@
import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { Stream, Playlist, Country, Subdivision, Region } from '../models'
import { PUBLIC_DIR } from '../constants'

type IndexCountryGeneratorProps = {
  streams: Collection
  regions: Collection
  countries: Collection
  subdivisions: Collection
  logger: Logger
}

export class IndexCountryGenerator implements Generator {
  streams: Collection
  countries: Collection
  regions: Collection
  subdivisions: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, regions, countries, subdivisions, logger }: IndexCountryGeneratorProps) {
    this.streams = streams
    this.countries = countries
    this.regions = regions
    this.subdivisions = subdivisions
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    let groupedStreams = new Collection()

    this.streams
      .orderBy(stream => stream.getTitle())
      .filter(stream => stream.isSFW())
      .forEach(stream => {
        if (stream.noBroadcastArea()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'Undefined'
          groupedStreams.add(streamClone)
          return
        }

        if (stream.isInternational()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'International'
          groupedStreams.add(streamClone)
        }

        this.getStreamBroadcastCountries(stream).forEach((country: Country) => {
          const streamClone = stream.clone()
          streamClone.groupTitle = country.name
          groupedStreams.add(streamClone)
        })
      })

    groupedStreams = groupedStreams.orderBy((stream: Stream) => {
      if (stream.groupTitle === 'International') return 'ZZ'
      if (stream.groupTitle === 'Undefined') return 'ZZZ'

      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.country.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
  }

  getStreamBroadcastCountries(stream: Stream) {
    const groupedRegions = this.regions.keyBy((region: Region) => region.code)
    const groupedCountries = this.countries.keyBy((country: Country) => country.code)
    const groupedSubdivisions = this.subdivisions.keyBy(
      (subdivision: Subdivision) => subdivision.code
    )

    let broadcastCountries = new Collection()

    stream.broadcastArea.forEach(broadcastAreaCode => {
      const [type, code] = broadcastAreaCode.split('/')
      switch (type) {
        case 'c':
          broadcastCountries.add(code)
          break
        case 'r':
          if (code !== 'INT' && groupedRegions.has(code)) {
            broadcastCountries = broadcastCountries.concat(groupedRegions.get(code).countries)
          }
          break
        case 's':
          if (groupedSubdivisions.has(code)) {
            broadcastCountries.add(groupedSubdivisions.get(code).country)
          }
          break
      }
    })

    return broadcastCountries
      .uniq()
      .map(code => groupedCountries.get(code))
      .filter(Boolean)
  }
}
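A worked illustration (hypothetical data, not from this diff) of how getStreamBroadcastCountries() resolves broadcast area codes:

// stream.broadcastArea = ['c/US', 's/CA-ON', 'r/INT']
//
//   'c/US'    -> the country code is added directly        -> US
//   's/CA-ON' -> the subdivision's parent country is added -> CA
//   'r/INT'   -> skipped here; only non-INT regions expand to their member countries
//
// After uniq() and the lookup in groupedCountries, the stream appears under
// both "United States" and "Canada" in index.country.m3u, plus under
// "International" via the separate isInternational() branch in generate().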
32 scripts/generators/indexGenerator.ts Normal file
@@ -0,0 +1,32 @@
import { Collection, Logger, Storage } from '../core'
import { Stream, Playlist } from '../models'
import { Generator } from './generator'
import { PUBLIC_DIR } from '../constants'

type IndexGeneratorProps = {
  streams: Collection
  logger: Logger
}

export class IndexGenerator implements Generator {
  streams: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, logger }: IndexGeneratorProps) {
    this.streams = streams
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    const sfwStreams = this.streams
      .orderBy(stream => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())

    const playlist = new Playlist(sfwStreams, { public: true })
    const filepath = 'index.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
  }
}
52 scripts/generators/indexLanguageGenerator.ts Normal file
@@ -0,0 +1,52 @@
import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { Stream, Playlist, Language } from '../models'
import { PUBLIC_DIR } from '../constants'

type IndexLanguageGeneratorProps = {
  streams: Collection
  logger: Logger
}

export class IndexLanguageGenerator implements Generator {
  streams: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, logger }: IndexLanguageGeneratorProps) {
    this.streams = streams
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    let groupedStreams = new Collection()
    this.streams
      .orderBy(stream => stream.getTitle())
      .filter(stream => stream.isSFW())
      .forEach(stream => {
        if (stream.noLanguages()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'Undefined'
          groupedStreams.add(streamClone)
          return
        }

        stream.languages.forEach((language: Language) => {
          const streamClone = stream.clone()
          streamClone.groupTitle = language.name
          groupedStreams.add(streamClone)
        })
      })

    groupedStreams = groupedStreams.orderBy((stream: Stream) => {
      if (stream.groupTitle === 'Undefined') return 'ZZ'
      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.language.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
  }
}
30 scripts/generators/indexNsfwGenerator.ts Normal file
@@ -0,0 +1,30 @@
import { Collection, Logger, Storage } from '../core'
import { Stream, Playlist } from '../models'
import { Generator } from './generator'
import { PUBLIC_DIR } from '../constants'

type IndexNsfwGeneratorProps = {
  streams: Collection
  logger: Logger
}

export class IndexNsfwGenerator implements Generator {
  streams: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, logger }: IndexNsfwGeneratorProps) {
    this.streams = streams
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    const allStreams = this.streams.orderBy((stream: Stream) => stream.getTitle())

    const playlist = new Playlist(allStreams, { public: true })
    const filepath = 'index.nsfw.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
  }
}
83 scripts/generators/indexRegionGenerator.ts Normal file
@@ -0,0 +1,83 @@
import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { Stream, Playlist, Region } from '../models'
import { PUBLIC_DIR } from '../constants'

type IndexRegionGeneratorProps = {
  streams: Collection
  regions: Collection
  logger: Logger
}

export class IndexRegionGenerator implements Generator {
  streams: Collection
  regions: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, regions, logger }: IndexRegionGeneratorProps) {
    this.streams = streams
    this.regions = regions
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    let groupedStreams = new Collection()
    this.streams
      .orderBy((stream: Stream) => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())
      .forEach((stream: Stream) => {
        if (stream.noBroadcastArea()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'Undefined'
          groupedStreams.push(streamClone)
          return
        }

        this.getStreamRegions(stream).forEach((region: Region) => {
          const streamClone = stream.clone()
          streamClone.groupTitle = region.name
          groupedStreams.push(streamClone)
        })
      })

    groupedStreams = groupedStreams.orderBy((stream: Stream) => {
      if (stream.groupTitle === 'Undefined') return 'ZZ'
      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.region.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
  }

  getStreamRegions(stream: Stream) {
    let streamRegions = new Collection()
    stream.broadcastArea.forEach(broadcastAreaCode => {
      const [type, code] = broadcastAreaCode.split('/')
      switch (type) {
        case 'r':
          const groupedRegions = this.regions.keyBy((region: Region) => region.code)
          streamRegions.add(groupedRegions.get(code))
          break
        case 's':
          const [countryCode] = code.split('-')
          const subdivisionRegions = this.regions.filter((region: Region) =>
            region.countries.includes(countryCode)
          )
          streamRegions = streamRegions.concat(subdivisionRegions)
          break
        case 'c':
          const countryRegions = this.regions.filter((region: Region) =>
            region.countries.includes(code)
          )
          streamRegions = streamRegions.concat(countryRegions)
          break
      }
    })

    return streamRegions
  }
}
@@ -1,32 +0,0 @@
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)

  let items = []
  streams.forEach(stream => {
    if (!stream.categories.length) {
      const item = _.cloneDeep(stream)
      item.group_title = 'Undefined'
      items.push(item)

      return
    }

    stream.categories
      .filter(c => c)
      .forEach(category => {
        const item = _.cloneDeep(stream)
        item.group_title = category.name
        items.push(item)
      })
  })

  items = _.sortBy(items, item => {
    if (item.group_title === 'Undefined') return ''

    return item.group_title
  })

  return { filepath: 'index.category.m3u', items }
}
@@ -1,80 +0,0 @@
const api = require('../core/api')
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)

  await api.regions.load()
  let regions = await api.regions.all()
  regions = _.keyBy(regions, 'code')

  await api.countries.load()
  let countries = await api.countries.all()
  countries = _.keyBy(countries, 'code')

  await api.subdivisions.load()
  let subdivisions = await api.subdivisions.all()
  subdivisions = _.keyBy(subdivisions, 'code')

  let items = []
  streams.forEach(stream => {
    if (!stream.broadcast_area.length) {
      const item = _.cloneDeep(stream)
      item.group_title = 'Undefined'
      items.push(item)
      return
    }

    if (stream.broadcast_area.includes('r/INT')) {
      const item = _.cloneDeep(stream)
      item.group_title = 'International'
      items.push(item)
    }

    const broadcastCountries = getBroadcastCountries(stream, { countries, regions, subdivisions })
    broadcastCountries.forEach(country => {
      const item = _.cloneDeep(stream)
      item.group_title = country.name
      items.push(item)
    })
  })

  items = sortByGroupTitle(items)

  return { filepath: 'index.country.m3u', items }
}

function getBroadcastCountries(stream, { countries, regions, subdivisions }) {
  let codes = stream.broadcast_area.reduce((acc, item) => {
    const [type, code] = item.split('/')
    switch (type) {
      case 'c':
        acc.push(code)
        break
      case 'r':
        if (code !== 'INT' && regions[code]) {
          acc = acc.concat(regions[code].countries)
        }
        break
      case 's':
        if (subdivisions[code]) {
          acc.push(subdivisions[code].country)
        }
        break
    }
    return acc
  }, [])

  codes = _.uniq(codes)

  return codes.map(code => countries[code]).filter(c => c)
}

function sortByGroupTitle(items) {
  return _.sortBy(items, item => {
    if (item.group_title === 'International') return '[' // ASCII character 91
    if (item.group_title === 'Undefined') return ']' // ASCII character 93

    return item.group_title
  })
}
@@ -1,29 +0,0 @@
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)

  let items = []
  streams.forEach(stream => {
    if (!stream.languages.length) {
      const item = _.cloneDeep(stream)
      item.group_title = 'Undefined'
      items.push(item)
      return
    }

    stream.languages.forEach(language => {
      const item = _.cloneDeep(stream)
      item.group_title = language.name
      items.push(item)
    })
  })

  items = _.sortBy(items, i => {
    if (i.group_title === 'Undefined') return ''

    return i.group_title
  })

  return { filepath: 'index.language.m3u', items }
}
@@ -1,7 +0,0 @@
const api = require('../core/api')
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)
  return { filepath: 'index.m3u', items: streams }
}
@@ -1,6 +0,0 @@
const api = require('../core/api')
const _ = require('lodash')

module.exports = async function (streams = []) {
  return { filepath: 'index.nsfw.m3u', items: streams }
}
@@ -1,57 +0,0 @@
const api = require('../core/api')
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)

  await api.regions.load()
  let regions = await api.regions.all()
  regions = _.keyBy(regions, 'code')

  let items = []
  streams.forEach(stream => {
    if (!stream.broadcast_area.length) {
      const item = _.cloneDeep(stream)
      item.group_title = 'Undefined'
      items.push(item)
      return
    }

    getChannelRegions(stream, { regions }).forEach(region => {
      const item = _.cloneDeep(stream)
      item.group_title = region.name
      items.push(item)
    })
  })

  items = _.sortBy(items, i => {
    if (i.group_title === 'Undefined') return ''

    return i.group_title
  })

  return { filepath: 'index.region.m3u', items }
}

function getChannelRegions(stream, { regions }) {
  return stream.broadcast_area
    .reduce((acc, item) => {
      const [type, code] = item.split('/')
      switch (type) {
        case 'r':
          acc.push(regions[code])
          break
        case 's':
          const [c] = code.split('-')
          const r1 = _.filter(regions, { countries: [c] })
          acc = acc.concat(r1)
          break
        case 'c':
          const r2 = _.filter(regions, { countries: [code] })
          acc = acc.concat(r2)
          break
      }
      return acc
    }, [])
    .filter(i => i)
}
@@ -1,25 +0,0 @@
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)

  let languages = []
  streams.forEach(stream => {
    languages = languages.concat(stream.languages)
  })
  languages = _.uniqBy(languages, 'code')
  languages = _.sortBy(languages, 'name')

  const output = []
  for (const language of languages) {
    let items = _.filter(streams, { languages: [{ code: language.code }] })
    if (items.length) {
      output.push({ filepath: `languages/${language.code}.m3u`, items })
    }
  }

  let items = _.filter(streams, stream => !stream.languages.length)
  output.push({ filepath: 'languages/undefined.m3u', items })

  return output
}
50 scripts/generators/languagesGenerator.ts Normal file
@@ -0,0 +1,50 @@
import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { Playlist, Language, Stream } from '../models'
import { PUBLIC_DIR } from '../constants'

type LanguagesGeneratorProps = { streams: Collection; logger: Logger }

export class LanguagesGenerator implements Generator {
  streams: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, logger }: LanguagesGeneratorProps) {
    this.streams = streams
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    let streams = this.streams.orderBy(stream => stream.getTitle()).filter(stream => stream.isSFW())

    let languages = new Collection()
    streams.forEach((stream: Stream) => {
      languages = languages.concat(stream.languages)
    })

    languages
      .uniqBy((language: Language) => language.code)
      .orderBy((language: Language) => language.name)
      .forEach(async (language: Language) => {
        const languageStreams = streams.filter(stream => stream.hasLanguage(language))

        if (languageStreams.isEmpty()) return

        const playlist = new Playlist(languageStreams, { public: true })
        const filepath = `languages/${language.code}.m3u`
        await this.storage.save(filepath, playlist.toString())
        this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
      })

    const undefinedStreams = streams.filter(stream => stream.noLanguages())

    if (undefinedStreams.isEmpty()) return

    const playlist = new Playlist(undefinedStreams, { public: true })
    const filepath = 'languages/undefined.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
  }
}
@@ -1,33 +0,0 @@
const api = require('../core/api')
const _ = require('lodash')

module.exports = async function (streams = []) {
  streams = _.filter(streams, stream => stream.is_nsfw === false)

  await api.regions.load()
  const regions = await api.regions.all()

  await api.subdivisions.load()
  const subdivisions = await api.subdivisions.all()

  const output = []
  for (const region of regions) {
    if (region.code === 'INT') continue

    const regionCountries = region.countries
    let areaCodes = regionCountries.map(code => `c/${code}`)

    const regionSubdivisions = _.filter(
      subdivisions,
      s => regionCountries.indexOf(s.country) > -1
    ).map(s => `s/${s.code}`)
    areaCodes = areaCodes.concat(regionSubdivisions)

    areaCodes.push(`r/${region.code}`)

    let items = _.filter(streams, stream => _.intersection(stream.broadcast_area, areaCodes).length)
    output.push({ filepath: `regions/${region.code.toLowerCase()}.m3u`, items })
  }

  return output
}
51 scripts/generators/regionsGenerator.ts Normal file
@@ -0,0 +1,51 @@
import { Generator } from './generator'
import { Collection, Storage, Logger } from '../core'
import { Playlist, Subdivision, Region } from '../models'
import { PUBLIC_DIR } from '../constants'

type RegionsGeneratorProps = {
  streams: Collection
  regions: Collection
  subdivisions: Collection
  logger: Logger
}

export class RegionsGenerator implements Generator {
  streams: Collection
  regions: Collection
  subdivisions: Collection
  storage: Storage
  logger: Logger

  constructor({ streams, regions, subdivisions, logger }: RegionsGeneratorProps) {
    this.streams = streams
    this.regions = regions
    this.subdivisions = subdivisions
    this.storage = new Storage(PUBLIC_DIR)
    this.logger = logger
  }

  async generate(): Promise<void> {
    let streams = this.streams.orderBy(stream => stream.getTitle()).filter(stream => stream.isSFW())

    this.regions.forEach(async (region: Region) => {
      if (region.code === 'INT') return

      const regionSubdivisionsCodes = this.subdivisions
        .filter((subdivision: Subdivision) => region.countries.indexOf(subdivision.country) > -1)
        .map((subdivision: Subdivision) => `s/${subdivision.code}`)

      const regionCodes = region.countries
        .map((code: string) => `c/${code}`)
        .concat(regionSubdivisionsCodes)
        .add(`r/${region.code}`)

      const regionStreams = streams.filter(stream => stream.broadcastArea.intersects(regionCodes))

      const playlist = new Playlist(regionStreams, { public: true })
      const filepath = `regions/${region.code.toLowerCase()}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logger.info(JSON.stringify({ filepath, count: playlist.streams.count() }))
    })
  }
}
14 scripts/models/blocked.ts Normal file
@@ -0,0 +1,14 @@
type BlockedProps = {
  channel: string
  ref: string
}

export class Blocked {
  channel: string
  ref: string

  constructor({ ref, channel }: BlockedProps) {
    this.channel = channel
    this.ref = ref
  }
}
14 scripts/models/category.ts Normal file
@@ -0,0 +1,14 @@
type CategoryProps = {
  id: string
  name: string
}

export class Category {
  id: string
  name: string

  constructor({ id, name }: CategoryProps) {
    this.id = id
    this.name = name
  }
}
79 scripts/models/channel.ts Normal file
@@ -0,0 +1,79 @@
import { Collection } from '../core'

type ChannelProps = {
  id: string
  name: string
  alt_names: string[]
  network: string
  owners: string[]
  country: string
  subdivision: string
  city: string
  broadcast_area: string[]
  languages: string[]
  categories: string[]
  is_nsfw: boolean
  launched: string
  closed: string
  replaced_by: string
  website: string
  logo: string
}

export class Channel {
  id: string
  name: string
  altNames: Collection
  network: string
  owners: Collection
  country: string
  subdivision: string
  city: string
  broadcastArea: Collection
  languages: Collection
  categories: Collection
  isNSFW: boolean
  launched: string
  closed: string
  replacedBy: string
  website: string
  logo: string

  constructor({
    id,
    name,
    alt_names,
    network,
    owners,
    country,
    subdivision,
    city,
    broadcast_area,
    languages,
    categories,
    is_nsfw,
    launched,
    closed,
    replaced_by,
    website,
    logo
  }: ChannelProps) {
    this.id = id
    this.name = name
    this.altNames = new Collection(alt_names)
    this.network = network
    this.owners = new Collection(owners)
    this.country = country
    this.subdivision = subdivision
    this.city = city
    this.broadcastArea = new Collection(broadcast_area)
    this.languages = new Collection(languages)
    this.categories = new Collection(categories)
    this.isNSFW = is_nsfw
    this.launched = launched
    this.closed = closed
    this.replacedBy = replaced_by
    this.website = website
    this.logo = logo
  }
}
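A hedged example (invented values) of constructing a Channel from an API record; the snake_case input fields mirror ChannelProps above and are exposed as camelCase properties, with list fields wrapped in Collection:

import { Channel } from './channel'

// Hypothetical API record; only the field names come from ChannelProps.
const channel = new Channel({
  id: 'ExampleTV.us',
  name: 'Example TV',
  alt_names: [],
  network: 'Example Network',
  owners: [],
  country: 'US',
  subdivision: '',
  city: 'New York',
  broadcast_area: ['c/US'],
  languages: ['eng'],
  categories: ['news'],
  is_nsfw: false,
  launched: '2016-07-28',
  closed: '',
  replaced_by: '',
  website: 'https://example.com/',
  logo: 'https://example.com/logo.png'
})

// channel.altNames, channel.owners, channel.broadcastArea, channel.languages
// and channel.categories are now Collection instances rather than plain arrays.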
20 scripts/models/country.ts Normal file
@@ -0,0 +1,20 @@
type CountryProps = {
  code: string
  name: string
  languages: string[]
  flag: string
}

export class Country {
  code: string
  name: string
  languages: string[]
  flag: string

  constructor({ code, name, languages, flag }: CountryProps) {
    this.code = code
    this.name = name
    this.languages = languages
    this.flag = flag
  }
}
9 scripts/models/index.ts Normal file
@@ -0,0 +1,9 @@
export * from './playlist'
export * from './blocked'
export * from './stream'
export * from './category'
export * from './channel'
export * from './language'
export * from './country'
export * from './region'
export * from './subdivision'
14 scripts/models/language.ts Normal file
@@ -0,0 +1,14 @@
type LanguageProps = {
  code: string
  name: string
}

export class Language {
  code: string
  name: string

  constructor({ code, name }: LanguageProps) {
    this.code = code
    this.name = name
  }
}
28 scripts/models/playlist.ts Normal file
@@ -0,0 +1,28 @@
import { Collection } from '../core'
import { Stream } from '../models'

type PlaylistOptions = {
  public: boolean
}

export class Playlist {
  streams: Collection
  options: {
    public: boolean
  }

  constructor(streams: Collection, options?: PlaylistOptions) {
    this.streams = streams
    this.options = options || { public: false }
  }

  toString() {
    let output = `#EXTM3U\n`

    this.streams.forEach((stream: Stream) => {
      output += stream.toString(this.options) + `\n`
    })

    return output
  }
}
19 scripts/models/region.ts Normal file
@@ -0,0 +1,19 @@
import { Collection } from '../core'

type RegionProps = {
  code: string
  name: string
  countries: string[]
}

export class Region {
  code: string
  name: string
  countries: Collection

  constructor({ code, name, countries }: RegionProps) {
    this.code = code
    this.name = name
    this.countries = new Collection(countries)
  }
}
177 scripts/models/stream.ts Normal file
@@ -0,0 +1,177 @@
import { URL, Collection } from '../core'
import { Category, Language } from './index'

type StreamProps = {
  name: string
  url: string
  filepath: string
  line: number
  channel?: string
  httpReferrer?: string
  label?: string
  quality?: string
  userAgent?: string
}

export class Stream {
  channel: string
  filepath: string
  line: number
  httpReferrer: string
  label: string
  name: string
  quality: string
  url: string
  userAgent: string
  logo: string
  broadcastArea: Collection
  categories: Collection
  languages: Collection
  isNSFW: boolean
  groupTitle: string

  constructor({
    channel,
    filepath,
    line,
    httpReferrer,
    label,
    name,
    quality,
    url,
    userAgent
  }: StreamProps) {
    this.channel = channel || ''
    this.filepath = filepath
    this.line = line
    this.httpReferrer = httpReferrer || ''
    this.label = label || ''
    this.name = name
    this.quality = quality || ''
    this.url = url
    this.userAgent = userAgent || ''
    this.logo = ''
    this.broadcastArea = new Collection()
    this.categories = new Collection()
    this.languages = new Collection()
    this.isNSFW = false
    this.groupTitle = 'Undefined'
  }

  normalizeURL() {
    const url = new URL(this.url)

    this.url = url.normalize().toString()
  }

  clone(): Stream {
    return Object.assign(Object.create(Object.getPrototypeOf(this)), this)
  }

  hasName(): boolean {
    return !!this.name
  }

  noName(): boolean {
    return !this.name
  }

  hasChannel() {
    return !!this.channel
  }

  hasCategories(): boolean {
    return this.categories.notEmpty()
  }

  noCategories(): boolean {
    return this.categories.empty()
  }

  hasCategory(category: Category): boolean {
    return this.categories.includes((_category: Category) => _category.id === category.id)
  }

  noLanguages(): boolean {
    return this.languages.empty()
  }

  hasLanguage(language: Language): boolean {
    return this.languages.includes((_language: Language) => _language.code === language.code)
  }

  noBroadcastArea(): boolean {
    return this.broadcastArea.empty()
  }

  isInternational(): boolean {
    return this.broadcastArea.includes('r/INT')
  }

  isSFW(): boolean {
    return this.isNSFW === false
  }

  getTitle(): string {
    let title = `${this.name}`

    if (this.quality) {
      title += ` (${this.quality})`
    }

    if (this.label) {
      title += ` [${this.label}]`
    }

    return title
  }

  data() {
    return {
      channel: this.channel,
      filepath: this.filepath,
      httpReferrer: this.httpReferrer,
      label: this.label,
      name: this.name,
      quality: this.quality,
      url: this.url,
      userAgent: this.userAgent,
      line: this.line
    }
  }

  toJSON() {
    return {
      channel: this.channel,
      url: this.url,
      http_referrer: this.httpReferrer || null,
      user_agent: this.userAgent || null
    }
  }

  toString(options: { public: boolean }) {
    let output = `#EXTINF:-1 tvg-id="${this.channel}"`

    if (options.public) {
      output += ` tvg-logo="${this.logo}" group-title="${this.groupTitle}"`
    }

    if (this.userAgent) {
      output += ` user-agent="${this.userAgent}"`
    }

    output += `,${this.getTitle()}`

    if (this.httpReferrer) {
      output += `\n#EXTVLCOPT:http-referrer=${this.httpReferrer}`
    }

    if (this.userAgent) {
      output += `\n#EXTVLCOPT:http-user-agent=${this.userAgent}`
    }

    output += `\n${this.url}`

    return output
  }
}
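For reference, a small sketch (made-up values, not from this diff) of the playlist entry Stream.toString() produces:

import { Stream } from './stream'

// Hypothetical stream; logo stays '' and no user agent or referrer is set.
const stream = new Stream({
  name: 'Example TV',
  url: 'https://example.com/stream.m3u8',
  filepath: 'streams/us.m3u',
  line: 2,
  channel: 'ExampleTV.us',
  quality: '720p',
  label: 'Not 24/7'
})
stream.groupTitle = 'News'

// With { public: true } this prints approximately:
//   #EXTINF:-1 tvg-id="ExampleTV.us" tvg-logo="" group-title="News",Example TV (720p) [Not 24/7]
//   https://example.com/stream.m3u8
// Playlist.toString() prepends a single "#EXTM3U" line and joins the entries.
console.log(stream.toString({ public: true }))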
17 scripts/models/subdivision.ts Normal file
@@ -0,0 +1,17 @@
type SubdivisionProps = {
  code: string
  name: string
  country: string
}

export class Subdivision {
  code: string
  name: string
  country: string

  constructor({ code, name, country }: SubdivisionProps) {
    this.code = code
    this.name = name
    this.country = country
  }
}
@@ -1,13 +0,0 @@
module.exports = function () {
  if (this.group_title) return this.group_title

  if (this.categories.length) {
    return this.categories
      .filter(c => c)
      .map(category => category.name)
      .sort()
      .join(';')
  }

  return 'Undefined'
}
@@ -1,5 +0,0 @@
exports.group_title = require('./group_title')
exports.tvg_id = require('./tvg_id')
exports.tvg_logo = require('./tvg_logo')
exports.tvg_country = require('./tvg_country')
exports.tvg_language = require('./tvg_language')
@@ -1,16 +0,0 @@
module.exports = function () {
  if (this.tvg_country) return this.tvg_country

  if (this.broadcast_area.length) {
    return this.broadcast_area
      .map(item => {
        const [_, code] = item.split('/')
        return code
      })
      .filter(i => i)
      .sort()
      .join(';')
  }

  return ''
}
@@ -1,3 +0,0 @@
module.exports = function () {
  return this.channel || ''
}
@@ -1,13 +0,0 @@
module.exports = function () {
  if (this.tvg_language) return this.tvg_language

  if (this.languages.length) {
    return this.languages
      .map(language => (language ? language.name : null))
      .filter(l => l)
      .sort()
      .join(';')
  }

  return ''
}
@@ -1,5 +0,0 @@
module.exports = function () {
  if (this.tvg_logo) return this.tvg_logo

  return this.logo || ''
}
Some files were not shown because too many files have changed in this diff.