Compare commits


1 Commit

Author: github-actions
SHA1: 5a93dc1b85
Message: [version] update
    Created by: pukkandan
    :ci skip all
Date: 2022-03-08 19:57:15 +00:00
1140 changed files with 18354 additions and 19425 deletions


@@ -1,8 +0,0 @@
root = true
[**.py]
charset = utf-8
indent_size = 4
indent_style = space
trim_trailing_whitespace = true
insert_final_newline = true

.gitattributes (2 changes)

@@ -2,5 +2,3 @@
Makefile* text whitespace=-tab-in-indent
*.sh text eol=lf
*.md diff=markdown
*.py diff=python


@@ -11,9 +11,9 @@ body:
options:
- label: I'm reporting a broken site
required: true
- label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **2022.03.08**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
required: true
@@ -51,12 +51,12 @@ body:
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2022.05.18 (exe)
[debug] yt-dlp version 2022.03.08 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2022.05.18)
yt-dlp is up to date (2022.03.08)
<more lines>
render: shell
validations:


@@ -11,9 +11,9 @@ body:
options:
- label: I'm reporting a new site support request
required: true
- label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **2022.03.08**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
required: true
@@ -62,12 +62,12 @@ body:
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2022.05.18 (exe)
[debug] yt-dlp version 2022.03.08 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2022.05.18)
yt-dlp is up to date (2022.03.08)
<more lines>
render: shell
validations:


@@ -9,11 +9,11 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm requesting a site-specific feature
- label: I'm reporting a site feature request
required: true
- label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **2022.03.08**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
required: true
@@ -60,12 +60,12 @@ body:
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2022.05.18 (exe)
[debug] yt-dlp version 2022.03.08 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2022.05.18)
yt-dlp is up to date (2022.03.08)
<more lines>
render: shell
validations:


@@ -11,9 +11,9 @@ body:
options:
- label: I'm reporting a bug unrelated to a specific site
required: true
- label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **2022.03.08**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
required: true
@@ -45,12 +45,12 @@ body:
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2022.05.18 (exe)
[debug] yt-dlp version 2022.03.08 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2022.05.18)
yt-dlp is up to date (2022.03.08)
<more lines>
render: shell
validations:


@@ -9,11 +9,11 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm requesting a feature unrelated to a specific site
- label: I'm reporting a feature request
required: true
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
required: true
- label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **2022.03.08**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
required: true
@@ -30,24 +30,3 @@ body:
placeholder: WRITE DESCRIPTION HERE
validations:
required: true
- type: textarea
id: log
attributes:
label: Verbose log
description: |
If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2021.12.01 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2021.12.01)
<more lines>
render: shell


@@ -9,16 +9,14 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm asking a question and **not** reporting a bug or requesting a feature
- label: I'm asking a question and **not** reporting a bug/feature request
required: true
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
required: true
- label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones
required: true
- type: textarea
id: question
attributes:
@@ -37,7 +35,7 @@ body:
attributes:
label: Verbose log
description: |
If your question involves a yt-dlp command, provide the complete verbose output of that command.
If your question involes a yt-dlp command, provide the complete verbose output of that command.
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |


@@ -11,9 +11,9 @@ body:
options:
- label: I'm reporting a broken site
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **%(version)s**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
required: true


@@ -11,9 +11,9 @@ body:
options:
- label: I'm reporting a new site support request
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **%(version)s**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
required: true


@@ -9,11 +9,11 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm requesting a site-specific feature
- label: I'm reporting a site feature request
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **%(version)s**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
required: true


@@ -11,9 +11,9 @@ body:
options:
- label: I'm reporting a bug unrelated to a specific site
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **%(version)s**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- label: I've checked that all provided URLs are alive and playable in a browser
required: true
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
required: true


@@ -9,11 +9,11 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm requesting a feature unrelated to a specific site
- label: I'm reporting a feature request
required: true
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- label: I've verified that I'm running yt-dlp version **%(version)s**. ([update instructions](https://github.com/yt-dlp/yt-dlp#update))
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
required: true
@@ -30,24 +30,3 @@ body:
placeholder: WRITE DESCRIPTION HERE
validations:
required: true
- type: textarea
id: log
attributes:
label: Verbose log
description: |
If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2021.12.01 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2021.12.01)
<more lines>
render: shell


@@ -9,16 +9,14 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm asking a question and **not** reporting a bug or requesting a feature
- label: I'm asking a question and **not** reporting a bug/feature request
required: true
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones
required: true
- type: textarea
id: question
attributes:
@@ -37,7 +35,7 @@ body:
attributes:
label: Verbose log
description: |
If your question involves a yt-dlp command, provide the complete verbose output of that command.
If your question involes a yt-dlp command, provide the complete verbose output of that command.
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |


@@ -1,29 +1,28 @@
<!--
# Please follow the guide below
## Please follow the guide below
- You will be asked some questions, please read them **carefully** and answer honestly
- Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
- Put an `x` into all the boxes [ ] relevant to your *pull request* (like that [x])
- Use *Preview* tab to see how your *pull request* will actually look like
-->
---
### Before submitting a *pull request* make sure you have:
- [ ] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions)
- [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests
- [ ] Checked the code with [flake8](https://pypi.python.org/pypi/flake8) and [ran relevant tests](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions)
- [ ] Checked the code with [flake8](https://pypi.python.org/pypi/flake8)
### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check one of the following options:
- [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
- [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
### What is the purpose of your *pull request*?
- [ ] Fix or improvement to an extractor (Make sure to add/update tests)
- [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy))
- [ ] Core bug fix/improvement
- [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes))
- [ ] Bug fix
- [ ] Improvement
- [ ] New extractor
- [ ] New feature
---
### Description of your *pull request* and other information
Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible.
Explanation of your *pull request* in arbitrary form goes here. Please make sure the description explains the purpose and effect of your *pull request* and is worded well enough to be understood. Provide as much context and examples as possible.


@@ -2,20 +2,27 @@ name: Build
on: workflow_dispatch
jobs:
create_release:
build_unix:
runs-on: ubuntu-latest
outputs:
version_suffix: ${{ steps.version_suffix.outputs.version_suffix }}
ytdlp_version: ${{ steps.bump_version.outputs.ytdlp_version }}
upload_url: ${{ steps.create_release.outputs.upload_url }}
sha256_bin: ${{ steps.sha256_bin.outputs.sha256_bin }}
sha512_bin: ${{ steps.sha512_bin.outputs.sha512_bin }}
sha256_tar: ${{ steps.sha256_tar.outputs.sha256_tar }}
sha512_tar: ${{ steps.sha512_tar.outputs.sha512_tar }}
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.10'
python-version: '3.8'
- name: Install packages
run: sudo apt-get -y install zip pandoc man
- name: Set version suffix
id: version_suffix
env:
@@ -27,27 +34,83 @@ jobs:
run: |
python devscripts/update-version.py ${{ steps.version_suffix.outputs.version_suffix }}
make issuetemplates
- name: Push to release
id: push_release
run: |
git config --global user.name github-actions
git config --global user.email github-actions@example.com
git add -u
git commit -m "[version] update" -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
git commit -m "[version] update" -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
git push origin --force ${{ github.event.ref }}:release
echo ::set-output name=head_sha::$(git rev-parse HEAD)
- name: Update master
id: push_master
env:
PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
if: "env.PUSH_VERSION_COMMIT != ''"
run: git push origin ${{ github.event.ref }}
- name: Get Changelog
id: get_changelog
run: |
changelog=$(grep -oPz '(?s)(?<=### ${{ steps.bump_version.outputs.ytdlp_version }}\n{2}).+?(?=\n{2,3}###)' Changelog.md) || true
changelog=$(cat Changelog.md | grep -oPz '(?s)(?<=### ${{ steps.bump_version.outputs.ytdlp_version }}\n{2}).+?(?=\n{2,3}###)') || true
echo "changelog<<EOF" >> $GITHUB_ENV
echo "$changelog" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Build lazy extractors
id: lazy_extractors
run: python devscripts/make_lazy_extractors.py
- name: Run Make
run: make all tar
- name: Get SHA2-256SUMS for yt-dlp
id: sha256_bin
run: echo "::set-output name=sha256_bin::$(sha256sum yt-dlp | awk '{print $1}')"
- name: Get SHA2-256SUMS for yt-dlp.tar.gz
id: sha256_tar
run: echo "::set-output name=sha256_tar::$(sha256sum yt-dlp.tar.gz | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp
id: sha512_bin
run: echo "::set-output name=sha512_bin::$(sha512sum yt-dlp | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp.tar.gz
id: sha512_tar
run: echo "::set-output name=sha512_tar::$(sha512sum yt-dlp.tar.gz | awk '{print $1}')"
- name: Install dependencies for pypi
env:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
if: "env.PYPI_TOKEN != ''"
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine
- name: Build and publish on pypi
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
if: "env.TWINE_PASSWORD != ''"
run: |
rm -rf dist/*
python setup.py sdist bdist_wheel
twine upload dist/*
- name: Install SSH private key
env:
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
if: "env.BREW_TOKEN != ''"
uses: yt-dlp/ssh-agent@v0.5.3
with:
ssh-private-key: ${{ env.BREW_TOKEN }}
- name: Update Homebrew Formulae
env:
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
if: "env.BREW_TOKEN != ''"
run: |
git clone git@github.com:yt-dlp/homebrew-taps taps/
python3 devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ steps.bump_version.outputs.ytdlp_version }}"
git -C taps/ config user.name github-actions
git -C taps/ config user.email github-actions@example.com
git -C taps/ commit -am 'yt-dlp: ${{ steps.bump_version.outputs.ytdlp_version }}'
git -C taps/ push
- name: Create Release
id: create_release
uses: actions/create-release@v1
@@ -66,60 +129,13 @@ jobs:
${{ env.changelog }}
draft: false
prerelease: false
build_unix:
needs: create_release
runs-on: ubuntu-18.04 # Standalone executable should be built on minimum supported OS
outputs:
sha256_bin: ${{ steps.get_sha.outputs.sha256_bin }}
sha512_bin: ${{ steps.get_sha.outputs.sha512_bin }}
sha256_tar: ${{ steps.get_sha.outputs.sha256_tar }}
sha512_tar: ${{ steps.get_sha.outputs.sha512_tar }}
sha256_linux: ${{ steps.get_sha.outputs.sha256_linux }}
sha512_linux: ${{ steps.get_sha.outputs.sha512_linux }}
sha256_linux_zip: ${{ steps.get_sha.outputs.sha256_linux_zip }}
sha512_linux_zip: ${{ steps.get_sha.outputs.sha512_linux_zip }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.10'
- name: Install Requirements
run: |
sudo apt-get -y install zip pandoc man
python -m pip install --upgrade pip setuptools wheel twine
python -m pip install Pyinstaller -r requirements.txt
- name: Prepare
run: |
python devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
python devscripts/make_lazy_extractors.py
- name: Build Unix executables
run: |
make all tar
python pyinst.py --onedir
(cd ./dist/yt-dlp_linux && zip -r ../yt-dlp_linux.zip .)
python pyinst.py
- name: Get SHA2-SUMS
id: get_sha
run: |
echo "::set-output name=sha256_bin::$(sha256sum yt-dlp | awk '{print $1}')"
echo "::set-output name=sha512_bin::$(sha512sum yt-dlp | awk '{print $1}')"
echo "::set-output name=sha256_tar::$(sha256sum yt-dlp.tar.gz | awk '{print $1}')"
echo "::set-output name=sha512_tar::$(sha512sum yt-dlp.tar.gz | awk '{print $1}')"
echo "::set-output name=sha256_linux::$(sha256sum dist/yt-dlp_linux | awk '{print $1}')"
echo "::set-output name=sha512_linux::$(sha512sum dist/yt-dlp_linux | awk '{print $1}')"
echo "::set-output name=sha256_linux_zip::$(sha256sum dist/yt-dlp_linux.zip | awk '{print $1}')"
echo "::set-output name=sha512_linux_zip::$(sha512sum dist/yt-dlp_linux.zip | awk '{print $1}')"
- name: Upload zip binary
- name: Upload yt-dlp Unix binary
id: upload-release-asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yt-dlp
asset_name: yt-dlp
asset_content_type: application/octet-stream
@@ -128,269 +144,270 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yt-dlp.tar.gz
asset_name: yt-dlp.tar.gz
asset_content_type: application/gzip
- name: Upload standalone binary
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
asset_path: ./dist/yt-dlp_linux
asset_name: yt-dlp_linux
asset_content_type: application/octet-stream
- name: Upload onedir binary
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
asset_path: ./dist/yt-dlp_linux.zip
asset_name: yt-dlp_linux.zip
asset_content_type: application/zip
- name: Build and publish on PyPi
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
if: "env.TWINE_PASSWORD != ''"
run: |
rm -rf dist/*
python setup.py sdist bdist_wheel
twine upload dist/*
- name: Install SSH private key for Homebrew
env:
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
if: "env.BREW_TOKEN != ''"
uses: yt-dlp/ssh-agent@v0.5.3
with:
ssh-private-key: ${{ env.BREW_TOKEN }}
- name: Update Homebrew Formulae
env:
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
if: "env.BREW_TOKEN != ''"
run: |
git clone git@github.com:yt-dlp/homebrew-taps taps/
python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ steps.bump_version.outputs.ytdlp_version }}"
git -C taps/ config user.name github-actions
git -C taps/ config user.email github-actions@example.com
git -C taps/ commit -am 'yt-dlp: ${{ steps.bump_version.outputs.ytdlp_version }}'
git -C taps/ push
build_macos:
runs-on: macos-11
needs: create_release
needs: build_unix
outputs:
sha256_macos: ${{ steps.get_sha.outputs.sha256_macos }}
sha512_macos: ${{ steps.get_sha.outputs.sha512_macos }}
sha256_macos_zip: ${{ steps.get_sha.outputs.sha256_macos_zip }}
sha512_macos_zip: ${{ steps.get_sha.outputs.sha512_macos_zip }}
sha256_macos: ${{ steps.sha256_macos.outputs.sha256_macos }}
sha512_macos: ${{ steps.sha512_macos.outputs.sha512_macos }}
sha256_macos_zip: ${{ steps.sha256_macos_zip.outputs.sha256_macos_zip }}
sha512_macos_zip: ${{ steps.sha512_macos_zip.outputs.sha512_macos_zip }}
steps:
- uses: actions/checkout@v2
# NB: In order to create a universal2 application, the version of python3 in /usr/bin has to be used
# In order to create a universal2 application, the version of python3 in /usr/bin has to be used
- name: Install Requirements
run: |
brew install coreutils
/usr/bin/python3 -m pip install -U --user pip Pyinstaller -r requirements.txt
- name: Prepare
run: |
/usr/bin/python3 devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
/usr/bin/python3 devscripts/make_lazy_extractors.py
- name: Build
run: |
/usr/bin/python3 pyinst.py --target-architecture universal2 --onedir
(cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
/usr/bin/python3 pyinst.py --target-architecture universal2
- name: Get SHA2-SUMS
id: get_sha
run: |
echo "::set-output name=sha256_macos::$(sha256sum dist/yt-dlp_macos | awk '{print $1}')"
echo "::set-output name=sha512_macos::$(sha512sum dist/yt-dlp_macos | awk '{print $1}')"
echo "::set-output name=sha256_macos_zip::$(sha256sum dist/yt-dlp_macos.zip | awk '{print $1}')"
echo "::set-output name=sha512_macos_zip::$(sha512sum dist/yt-dlp_macos.zip | awk '{print $1}')"
- name: Upload standalone binary
/usr/bin/python3 -m pip install -U --user pip Pyinstaller==4.10 -r requirements.txt
- name: Bump version
id: bump_version
run: /usr/bin/python3 devscripts/update-version.py
- name: Build lazy extractors
id: lazy_extractors
run: /usr/bin/python3 devscripts/make_lazy_extractors.py
- name: Run PyInstaller Script
run: /usr/bin/python3 pyinst.py --target-architecture universal2 --onefile
- name: Upload yt-dlp MacOS binary
id: upload-release-macos
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_macos
asset_name: yt-dlp_macos
asset_content_type: application/octet-stream
- name: Upload onedir binary
- name: Get SHA2-256SUMS for yt-dlp_macos
id: sha256_macos
run: echo "::set-output name=sha256_macos::$(sha256sum dist/yt-dlp_macos | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp_macos
id: sha512_macos
run: echo "::set-output name=sha512_macos::$(sha512sum dist/yt-dlp_macos | awk '{print $1}')"
- name: Run PyInstaller Script with --onedir
run: |
/usr/bin/python3 pyinst.py --target-architecture universal2 --onedir
zip ./dist/yt-dlp_macos.zip ./dist/yt-dlp_macos
- name: Upload yt-dlp MacOS onedir
id: upload-release-macos-zip
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_macos.zip
asset_name: yt-dlp_macos.zip
asset_content_type: application/zip
- name: Get SHA2-256SUMS for yt-dlp_macos.zip
id: sha256_macos_zip
run: echo "::set-output name=sha256_macos_zip::$(sha256sum dist/yt-dlp_macos.zip | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp_macos.zip
id: sha512_macos_zip
run: echo "::set-output name=sha512_macos_zip::$(sha512sum dist/yt-dlp_macos.zip | awk '{print $1}')"
build_windows:
runs-on: windows-latest
needs: create_release
needs: build_unix
outputs:
sha256_win: ${{ steps.get_sha.outputs.sha256_win }}
sha512_win: ${{ steps.get_sha.outputs.sha512_win }}
sha256_py2exe: ${{ steps.get_sha.outputs.sha256_py2exe }}
sha512_py2exe: ${{ steps.get_sha.outputs.sha512_py2exe }}
sha256_win_zip: ${{ steps.get_sha.outputs.sha256_win_zip }}
sha512_win_zip: ${{ steps.get_sha.outputs.sha512_win_zip }}
sha256_win: ${{ steps.sha256_win.outputs.sha256_win }}
sha512_win: ${{ steps.sha512_win.outputs.sha512_win }}
sha256_py2exe: ${{ steps.sha256_py2exe.outputs.sha256_py2exe }}
sha512_py2exe: ${{ steps.sha512_py2exe.outputs.sha512_py2exe }}
sha256_win_zip: ${{ steps.sha256_win_zip.outputs.sha256_win_zip }}
sha512_win_zip: ${{ steps.sha512_win_zip.outputs.sha512_win_zip }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with: # 3.8 is used for Win7 support
# 3.8 is used for Win7 support
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Install Requirements
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
# Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
run: |
python -m pip install --upgrade pip setuptools wheel py2exe
pip install "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
- name: Prepare
run: |
python devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
python devscripts/make_lazy_extractors.py
- name: Build
run: |
python setup.py py2exe
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
python pyinst.py
python pyinst.py --onedir
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
- name: Get SHA2-SUMS
id: get_sha
run: |
echo "::set-output name=sha256_py2exe::$((Get-FileHash dist\yt-dlp_min.exe -Algorithm SHA256).Hash.ToLower())"
echo "::set-output name=sha512_py2exe::$((Get-FileHash dist\yt-dlp_min.exe -Algorithm SHA512).Hash.ToLower())"
echo "::set-output name=sha256_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
echo "::set-output name=sha512_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
echo "::set-output name=sha256_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA256).Hash.ToLower())"
echo "::set-output name=sha512_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA512).Hash.ToLower())"
- name: Upload py2exe binary
- name: Bump version
id: bump_version
env:
version_suffix: ${{ needs.build_unix.outputs.version_suffix }}
run: python devscripts/update-version.py ${{ env.version_suffix }}
- name: Build lazy extractors
id: lazy_extractors
run: python devscripts/make_lazy_extractors.py
- name: Run PyInstaller Script
run: python pyinst.py
- name: Upload yt-dlp.exe Windows binary
id: upload-release-windows
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
asset_path: ./dist/yt-dlp_min.exe
asset_name: yt-dlp_min.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Upload standalone binary
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp.exe
asset_name: yt-dlp.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Upload onedir binary
- name: Get SHA2-256SUMS for yt-dlp.exe
id: sha256_win
run: echo "::set-output name=sha256_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp.exe
id: sha512_win
run: echo "::set-output name=sha512_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
- name: Run PyInstaller Script with --onedir
run: |
python pyinst.py --onedir
Compress-Archive -LiteralPath ./dist/yt-dlp -DestinationPath ./dist/yt-dlp_win.zip
- name: Upload yt-dlp Windows onedir
id: upload-release-windows-zip
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_win.zip
asset_name: yt-dlp_win.zip
asset_content_type: application/zip
- name: Get SHA2-256SUMS for yt-dlp_win.zip
id: sha256_win_zip
run: echo "::set-output name=sha256_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp_win.zip
id: sha512_win_zip
run: echo "::set-output name=sha512_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA512).Hash.ToLower())"
- name: Run py2exe Script
run: python setup.py py2exe
- name: Upload yt-dlp_min.exe Windows binary
id: upload-release-windows-py2exe
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp.exe
asset_name: yt-dlp_min.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Get SHA2-256SUMS for yt-dlp_min.exe
id: sha256_py2exe
run: echo "::set-output name=sha256_py2exe::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp_min.exe
id: sha512_py2exe
run: echo "::set-output name=sha512_py2exe::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
build_windows32:
runs-on: windows-latest
needs: create_release
needs: build_unix
outputs:
sha256_win32: ${{ steps.get_sha.outputs.sha256_win32 }}
sha512_win32: ${{ steps.get_sha.outputs.sha512_win32 }}
sha256_win32: ${{ steps.sha256_win32.outputs.sha256_win32 }}
sha512_win32: ${{ steps.sha512_win32.outputs.sha512_win32 }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with: # 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
# 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
- name: Set up Python 3.7 32-Bit
uses: actions/setup-python@v2
with:
python-version: '3.7'
architecture: 'x86'
- name: Install Requirements
run: |
python -m pip install --upgrade pip setuptools wheel
pip install "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
- name: Prepare
run: |
python devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
python devscripts/make_lazy_extractors.py
- name: Build
run: |
python pyinst.py
- name: Get SHA2-SUMS
id: get_sha
run: |
echo "::set-output name=sha256_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA256).Hash.ToLower())"
echo "::set-output name=sha512_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA512).Hash.ToLower())"
- name: Upload standalone binary
- name: Bump version
id: bump_version
env:
version_suffix: ${{ needs.build_unix.outputs.version_suffix }}
run: python devscripts/update-version.py ${{ env.version_suffix }}
- name: Build lazy extractors
id: lazy_extractors
run: python devscripts/make_lazy_extractors.py
- name: Run PyInstaller Script for 32 Bit
run: python pyinst.py
- name: Upload Executable yt-dlp_x86.exe
id: upload-release-windows32
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_x86.exe
asset_name: yt-dlp_x86.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Get SHA2-256SUMS for yt-dlp_x86.exe
id: sha256_win32
run: echo "::set-output name=sha256_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp_x86.exe
id: sha512_win32
run: echo "::set-output name=sha512_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA512).Hash.ToLower())"
finish:
runs-on: ubuntu-latest
needs: [create_release, build_unix, build_windows, build_windows32, build_macos]
needs: [build_unix, build_windows, build_windows32, build_macos]
steps:
- name: Make SHA2-SUMS files
- name: Make SHA2-256SUMS file
env:
SHA256_BIN: ${{ needs.build_unix.outputs.sha256_bin }}
SHA256_TAR: ${{ needs.build_unix.outputs.sha256_tar }}
SHA256_WIN: ${{ needs.build_windows.outputs.sha256_win }}
SHA256_PY2EXE: ${{ needs.build_windows.outputs.sha256_py2exe }}
SHA256_WIN_ZIP: ${{ needs.build_windows.outputs.sha256_win_zip }}
SHA256_WIN32: ${{ needs.build_windows32.outputs.sha256_win32 }}
SHA256_MACOS: ${{ needs.build_macos.outputs.sha256_macos }}
SHA256_MACOS_ZIP: ${{ needs.build_macos.outputs.sha256_macos_zip }}
run: |
echo "${{ needs.build_unix.outputs.sha256_bin }} yt-dlp" >> SHA2-256SUMS
echo "${{ needs.build_unix.outputs.sha256_tar }} yt-dlp.tar.gz" >> SHA2-256SUMS
echo "${{ needs.build_unix.outputs.sha256_linux }} yt-dlp_linux" >> SHA2-256SUMS
echo "${{ needs.build_unix.outputs.sha256_linux_zip }} yt-dlp_linux.zip" >> SHA2-256SUMS
echo "${{ needs.build_windows.outputs.sha256_win }} yt-dlp.exe" >> SHA2-256SUMS
echo "${{ needs.build_windows.outputs.sha256_py2exe }} yt-dlp_min.exe" >> SHA2-256SUMS
echo "${{ needs.build_windows32.outputs.sha256_win32 }} yt-dlp_x86.exe" >> SHA2-256SUMS
echo "${{ needs.build_windows.outputs.sha256_win_zip }} yt-dlp_win.zip" >> SHA2-256SUMS
echo "${{ needs.build_macos.outputs.sha256_macos }} yt-dlp_macos" >> SHA2-256SUMS
echo "${{ needs.build_macos.outputs.sha256_macos_zip }} yt-dlp_macos.zip" >> SHA2-256SUMS
echo "${{ needs.build_unix.outputs.sha512_bin }} yt-dlp" >> SHA2-512SUMS
echo "${{ needs.build_unix.outputs.sha512_tar }} yt-dlp.tar.gz" >> SHA2-512SUMS
echo "${{ needs.build_unix.outputs.sha512_linux }} yt-dlp_linux" >> SHA2-512SUMS
echo "${{ needs.build_unix.outputs.sha512_linux_zip }} yt-dlp_linux.zip" >> SHA2-512SUMS
echo "${{ needs.build_windows.outputs.sha512_win }} yt-dlp.exe" >> SHA2-512SUMS
echo "${{ needs.build_windows.outputs.sha512_py2exe }} yt-dlp_min.exe" >> SHA2-512SUMS
echo "${{ needs.build_windows32.outputs.sha512_win32 }} yt-dlp_x86.exe" >> SHA2-512SUMS
echo "${{ needs.build_windows.outputs.sha512_win_zip }} yt-dlp_win.zip" >> SHA2-512SUMS
echo "${{ needs.build_macos.outputs.sha512_macos }} yt-dlp_macos" >> SHA2-512SUMS
echo "${{ needs.build_macos.outputs.sha512_macos_zip }} yt-dlp_macos.zip" >> SHA2-512SUMS
- name: Upload SHA2-256SUMS file
echo "${{ env.SHA256_BIN }} yt-dlp" >> SHA2-256SUMS
echo "${{ env.SHA256_TAR }} yt-dlp.tar.gz" >> SHA2-256SUMS
echo "${{ env.SHA256_WIN }} yt-dlp.exe" >> SHA2-256SUMS
echo "${{ env.SHA256_PY2EXE }} yt-dlp_min.exe" >> SHA2-256SUMS
echo "${{ env.SHA256_WIN32 }} yt-dlp_x86.exe" >> SHA2-256SUMS
echo "${{ env.SHA256_WIN_ZIP }} yt-dlp_win.zip" >> SHA2-256SUMS
echo "${{ env.SHA256_MACOS }} yt-dlp_macos" >> SHA2-256SUMS
echo "${{ env.SHA256_MACOS_ZIP }} yt-dlp_macos.zip" >> SHA2-256SUMS
- name: Upload 256SUMS file
id: upload-sums
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./SHA2-256SUMS
asset_name: SHA2-256SUMS
asset_content_type: text/plain
- name: Upload SHA2-512SUMS file
- name: Make SHA2-512SUMS file
env:
SHA512_BIN: ${{ needs.build_unix.outputs.sha512_bin }}
SHA512_TAR: ${{ needs.build_unix.outputs.sha512_tar }}
SHA512_WIN: ${{ needs.build_windows.outputs.sha512_win }}
SHA512_PY2EXE: ${{ needs.build_windows.outputs.sha512_py2exe }}
SHA512_WIN_ZIP: ${{ needs.build_windows.outputs.sha512_win_zip }}
SHA512_WIN32: ${{ needs.build_windows32.outputs.sha512_win32 }}
SHA512_MACOS: ${{ needs.build_macos.outputs.sha512_macos }}
SHA512_MACOS_ZIP: ${{ needs.build_macos.outputs.sha512_macos_zip }}
run: |
echo "${{ env.SHA512_BIN }} yt-dlp" >> SHA2-512SUMS
echo "${{ env.SHA512_TAR }} yt-dlp.tar.gz" >> SHA2-512SUMS
echo "${{ env.SHA512_WIN }} yt-dlp.exe" >> SHA2-512SUMS
echo "${{ env.SHA512_WIN_ZIP }} yt-dlp_win.zip" >> SHA2-512SUMS
echo "${{ env.SHA512_PY2EXE }} yt-dlp_min.exe" >> SHA2-512SUMS
echo "${{ env.SHA512_WIN32 }} yt-dlp_x86.exe" >> SHA2-512SUMS
echo "${{ env.SHA512_MACOS }} yt-dlp_macos" >> SHA2-512SUMS
echo "${{ env.SHA512_MACOS_ZIP }} yt-dlp_macos.zip" >> SHA2-512SUMS
- name: Upload 512SUMS file
id: upload-512sums
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.create_release.outputs.upload_url }}
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./SHA2-512SUMS
asset_name: SHA2-512SUMS
asset_content_type: text/plain


@@ -8,17 +8,14 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
# CPython 3.9 is in quick-test
python-version: ['3.6', '3.7', '3.10', 3.11-dev, pypy-3.6, pypy-3.7, pypy-3.8]
os: [ubuntu-18.04]
# py3.9 is in quick-test
python-version: [3.7, 3.8, 3.10-dev, pypy-3.6, pypy-3.7]
run-tests-ext: [sh]
include:
# atleast one of each CPython/PyPy tests must be in windows
# atleast one of the tests must be in windows
- os: windows-latest
python-version: '3.8'
run-tests-ext: bat
- os: windows-latest
python-version: pypy-3.9
python-version: 3.6
run-tests-ext: bat
steps:
- uses: actions/checkout@v2


@@ -8,16 +8,12 @@ jobs:
strategy:
fail-fast: true
matrix:
os: [ubuntu-latest]
python-version: ['3.6', '3.7', '3.9', '3.10', 3.11-dev, pypy-3.6, pypy-3.7, pypy-3.8]
os: [ubuntu-18.04]
python-version: [3.7, 3.8, 3.9, 3.10-dev, pypy-3.6, pypy-3.7]
run-tests-ext: [sh]
include:
# atleast one of each CPython/PyPy tests must be in windows
- os: windows-latest
python-version: '3.8'
run-tests-ext: bat
- os: windows-latest
python-version: pypy-3.9
python-version: 3.6
run-tests-ext: bat
steps:
- uses: actions/checkout@v2

.gitignore (4 changes)

@@ -82,7 +82,6 @@ updates_key.pem
*.egg-info
.tox
*.class
*.isorted
# Generated
AUTHORS
@@ -117,6 +116,3 @@ yt-dlp.zip
ytdlp_plugins/extractor/*
!ytdlp_plugins/extractor/__init__.py
!ytdlp_plugins/extractor/sample.py
ytdlp_plugins/postprocessor/*
!ytdlp_plugins/postprocessor/__init__.py
!ytdlp_plugins/postprocessor/sample.py

.readthedocs.yml (new file, 22 additions)

@@ -0,0 +1,22 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/conf.py
# Optionally build your docs in additional formats such as PDF
formats:
- epub
- pdf
- htmlzip
# Optionally set the version of Python and requirements required to build your docs
python:
version: 3
install:
- requirements: docs/requirements.txt


@@ -178,6 +178,7 @@ After you have ensured this site is distributing its content legally, you can fo
1. Start with this simple template and save it to `yt_dlp/extractor/yourextractor.py`:
```python
# coding: utf-8
from .common import InfoExtractor
@@ -214,7 +215,7 @@ After you have ensured this site is distributing its content legally, you can fo
# TODO more properties (see yt_dlp/extractor/common.py)
}
```
1. Add an import in [`yt_dlp/extractor/_extractors.py`](yt_dlp/extractor/_extractors.py). Note that the class name must end with `IE`.
1. Add an import in [`yt_dlp/extractor/extractors.py`](yt_dlp/extractor/extractors.py).
1. Run `python test/test_download.py TestDownload.test_YourExtractor` (note that `YourExtractor` doesn't end with `IE`). This *should fail* at first, but you can continually re-run it until you're done. If you decide to add more than one test, the tests will then be named `TestDownload.test_YourExtractor`, `TestDownload.test_YourExtractor_1`, `TestDownload.test_YourExtractor_2`, etc. Note that tests with `only_matching` key in test's dict are not counted in. You can also run all the tests in one go with `TestDownload.test_YourExtractor_all`
1. Make sure you have atleast one test for your extractor. Even if all videos covered by the extractor are expected to be inaccessible for automated testing, tests should still be added with a `skip` parameter indicating why the particular test is disabled from running.
1. Have a look at [`yt_dlp/extractor/common.py`](yt_dlp/extractor/common.py) for possible helper methods and a [detailed description of what your extractor should and may return](yt_dlp/extractor/common.py#L91-L426). Add tests and code for as many as you want.
@@ -225,7 +226,7 @@ After you have ensured this site is distributing its content legally, you can fo
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.6 and above. Backward compatibility is not required for even older versions of Python.
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
$ git add yt_dlp/extractor/_extractors.py
$ git add yt_dlp/extractor/extractors.py
$ git add yt_dlp/extractor/yourextractor.py
$ git commit -m '[yourextractor] Add extractor'
$ git push origin yourextractor
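As a hedged illustration of the test conventions mentioned in the checklist above (multiple tests, `only_matching` entries that are not counted, and a `skip` reason for tests that cannot run automatically), here is a sketch with hypothetical URLs and values:
```python
_TESTS = [{
    'url': 'https://yourextractor.com/watch/42',
    'md5': 'TODO: md5 sum of the first 10241 bytes of the video file (use --test)',
    'info_dict': {
        'id': '42',
        'ext': 'mp4',
        'title': 'Video title goes here',
    },
}, {
    # Only checked against _VALID_URL; not downloaded during testing
    'url': 'https://yourextractor.com/embed/42',
    'only_matching': True,
}, {
    # Kept for documentation but disabled from running
    'url': 'https://yourextractor.com/watch/43',
    'skip': 'Requires login and is not accessible for automated testing',
    'info_dict': {
        'id': '43',
        'ext': 'mp4',
        'title': 'Members-only video',
    },
}]
```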
@@ -300,10 +301,14 @@ description = meta['summary'] # incorrect
The latter will break extraction process with `KeyError` if `summary` disappears from `meta` at some later time but with the former approach extraction will just go ahead with `description` set to `None` which is perfectly fine (remember `None` is equivalent to the absence of data).
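For illustration, a minimal self-contained sketch of the two approaches, with a hypothetical `meta` dict in which `summary` is absent:
```python
meta = {'title': 'Video title'}  # hypothetical parsed JSON; 'summary' is missing

description = meta.get('summary')  # correct: simply yields None when the field is absent
# description = meta['summary']    # incorrect: would raise KeyError here
```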
If the data is nested, do not use `.get` chains, but instead make use of `traverse_obj`.
If the data is nested, do not use `.get` chains, but instead make use of the utility functions `try_get` or `traverse_obj`
Considering the above `meta` again, assume you want to extract `["user"]["name"]` and put it in the resulting info dict as `uploader`
```python
uploader = try_get(meta, lambda x: x['user']['name']) # correct
```
or
```python
uploader = traverse_obj(meta, ('user', 'name')) # correct
```
@@ -317,10 +322,6 @@ or
```python
uploader = meta.get('user', {}).get('name') # incorrect
```
or
```python
uploader = try_get(meta, lambda x: x['user']['name']) # old utility
```
Similarly, you should pass `fatal=False` when extracting optional data from a webpage with `_search_regex`, `_html_search_regex` or similar methods, for instance:
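A hedged sketch of such a call inside an extractor method (the pattern and field name are hypothetical); `fatal=False` makes `_search_regex` return `None` instead of raising when nothing matches:
```python
description = self._search_regex(
    r'<meta[^>]+name="description"[^>]+content="([^"]+)"',
    webpage, 'description', fatal=False)
```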
@@ -346,25 +347,25 @@ On failure this code will silently continue the extraction with `description` se
Another thing to remember is not to try to iterate over `None`
Say you extracted a list of thumbnails into `thumbnail_data` and want to iterate over them
Say you extracted a list of thumbnails into `thumbnail_data` using `try_get` and now want to iterate over them
```python
thumbnail_data = data.get('thumbnails') or []
thumbnail_data = try_get(...)
thumbnails = [{
'url': item['url']
} for item in thumbnail_data] # correct
} for item in thumbnail_data or []] # correct
```
and not like:
```python
thumbnail_data = data.get('thumbnails')
thumbnail_data = try_get(...)
thumbnails = [{
'url': item['url']
} for item in thumbnail_data] # incorrect
```
In this case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `or []` avoids this error and results in setting an empty list in `thumbnails` instead.
In the later case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `for item in thumbnail_data or []` avoids this error and results in setting an empty list in `thumbnails` instead.
### Provide fallbacks
@@ -374,21 +375,21 @@ When extracting metadata try to do so from multiple sources. For example if `tit
#### Example
Say `meta` from the previous example has a `title` and you are about to extract it like:
Say `meta` from the previous example has a `title` and you are about to extract it. Since `title` is a mandatory meta field you should end up with something like:
```python
title = meta.get('title')
title = meta['title']
```
If `title` disappears from `meta` in future due to some changes on the hoster's side the title extraction would fail.
If `title` disappears from `meta` in future due to some changes on the hoster's side the extraction would fail since `title` is mandatory. That's expected.
Assume that you have some another source you can extract `title` from, for example `og:title` HTML meta of a `webpage`. In this case you can provide a fallback like:
Assume that you have some another source you can extract `title` from, for example `og:title` HTML meta of a `webpage`. In this case you can provide a fallback scenario:
```python
title = meta.get('title') or self._og_search_title(webpage)
```
This code will try to extract from `meta` first and if it fails it will try extracting `og:title` from a `webpage`, making the extractor more robust.
This code will try to extract from `meta` first and if it fails it will try extracting `og:title` from a `webpage`.
### Regular expressions
@@ -431,7 +432,7 @@ title = self._search_regex( # correct
r'<span[^>]+class="title"[^>]*>([^<]+)', webpage, 'title')
```
which tolerates potential changes in the `style` attribute's value. Or even better:
Or even better:
```python
title = self._search_regex( # correct
@@ -439,7 +440,7 @@ title = self._search_regex( # correct
webpage, 'title', group='title')
```
which also handles both single quotes in addition to double quotes.
Note how you tolerate potential changes in the `style` attribute's value or switch from using double quotes to single for `class` attribute:
The code definitely should not look like:
@@ -460,41 +461,6 @@ title = self._search_regex( # incorrect
Here the presence or absence of other attributes including `style` is irrelevent for the data we need, and so the regex must not depend on it
#### Keep the regular expressions as simple as possible, but no simpler
Since many extractors deal with unstructured data provided by websites, we will often need to use very complex regular expressions. You should try to use the *simplest* regex that can accomplish what you want. In other words, each part of the regex must have a reason for existing. If you can take out a symbol and the functionality does not change, the symbol should not be there.
##### Example
Correct:
```python
_VALID_URL = r'https?://(?:www\.)?website\.com/(?:[^/]+/){3,4}(?P<display_id>[^/]+)_(?P<id>\d+)'
```
Incorrect:
```python
_VALID_URL = r'https?:\/\/(?:www\.)?website\.com\/[^\/]+/[^\/]+/[^\/]+(?:\/[^\/]+)?\/(?P<display_id>[^\/]+)_(?P<id>\d+)'
```
#### Do not misuse `.` and use the correct quantifiers (`+*?`)
Avoid creating regexes that over-match because of wrong use of quantifiers. Also try to avoid non-greedy matching (`?`) where possible since they could easily result in [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)
Correct:
```python
title = self._search_regex(r'<span\b[^>]+class="title"[^>]*>([^<]+)', webpage, 'title')
```
Incorrect:
```python
title = self._search_regex(r'<span\b.*class="title".*>(.+?)<', webpage, 'title')
```
### Long lines policy
There is a soft limit to keep lines of code under 100 characters long. This means it should be respected if possible and if it does not make readability and code maintenance worse. Sometimes, it may be reasonable to go upto 120 characters and sometimes even 80 can be unreadable. Keep in mind that this is not a hard limit and is just one of many tools to make the code more readable.
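As a rough sketch of keeping within the soft limit, a long call can simply be wrapped across its arguments (the URL, note text and variable names are hypothetical):
```python
playlist = self._download_json(
    'https://example.com/api/v2/playlists/%s?fields=title,entries' % playlist_id,
    playlist_id, note='Downloading playlist metadata', fatal=False)
```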
@@ -556,35 +522,27 @@ formats = self._extract_m3u8_formats(m3u8_url,
### Quotes
Always use single quotes for strings (even if the string has `'`) and double quotes for docstrings. Use `'''` only for multi-line strings. An exception can be made if a string has multiple single quotes in it and escaping makes it *significantly* harder to read. For f-strings, use you can use double quotes on the inside. But avoid f-strings that have too many quotes inside.
Always use single quotes for strings (even if the string has `'`) and double quotes for docstrings. Use `'''` only for multi-line strings. An exception can be made if a string has multiple single quotes in it and escaping makes it significantly harder to read. For f-strings, use you can use double quotes on the inside. But avoid f-strings that have too many quotes inside.
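A short self-contained sketch of this quoting convention (the function and strings are illustrative only):
```python
def report_download(name):
    """Report which video is being processed."""  # double quotes for docstrings
    print('Downloading "%s"' % name)  # single quotes for ordinary strings
    print(f'Saving to "{name}.mp4"')  # double quotes are fine inside an f-string
```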
### Inline values
Extracting variables is acceptable for reducing code duplication and improving readability of complex expressions. However, you should avoid extracting variables used only once and moving them to opposite parts of the extractor file, which makes reading the linear flow difficult.
#### Examples
#### Example
Correct:
```python
return {
'title': self._html_search_regex(r'<h1>([^<]+)</h1>', webpage, 'title'),
# ...some lines of code...
}
title = self._html_search_regex(r'<title>([^<]+)</title>', webpage, 'title')
```
Incorrect:
```python
TITLE_RE = r'<h1>([^<]+)</h1>'
# ...some lines of code...
title = self._html_search_regex(TITLE_RE, webpage, 'title')
# ...some lines of code...
return {
    'title': title,
    # ...some lines of code...
}
```
@@ -616,32 +574,33 @@ Methods supporting list of patterns are: `_search_regex`, `_html_search_regex`,
### Trailing parentheses
Always move trailing parentheses used for grouping/functions after the last argument. On the other hand, a multi-line literal list/tuple/dict/set should be closed on a new line. Generators and list/dict comprehensions may use either style.
#### Examples
Correct:
```python
url = traverse_obj(info, (
    'context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'), list)
```
Correct:
```python
url = traverse_obj(
    info,
    ('context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'),
    list)
```
Incorrect:
```python
url = traverse_obj(
    info,
    ('context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'),
    list,
)
```
@@ -684,23 +643,27 @@ Wrap all extracted numeric data into safe functions from [`yt_dlp/utils.py`](yt_
Use `url_or_none` for safe URL processing.
Use `traverse_obj` and `try_call` (which supersede `dict_get` and `try_get`) for safe metadata extraction from parsed JSON.
Use `unified_strdate` for uniform `upload_date` or any `YYYYMMDD` meta field extraction, `unified_timestamp` for uniform `timestamp` extraction, `parse_filesize` for `filesize` extraction, `parse_count` for count meta fields extraction, `parse_resolution` for resolution extraction, `parse_duration` for `duration` extraction and `parse_age_limit` for `age_limit` extraction.
Explore [`yt_dlp/utils.py`](yt_dlp/utils.py) for more useful convenience functions.
#### Examples
##### Safely extract optional description from parsed JSON
```python
description = traverse_obj(response, ('result', 'video', 'summary'), expected_type=str)
thumbnails = traverse_obj(response, ('result', 'thumbnails', ..., 'url'), expected_type=url_or_none)
```
##### Safely extract more optional metadata
```python
video = traverse_obj(response, ('result', 'video', 0), default={}, expected_type=dict)
description = video.get('summary')
duration = float_or_none(video.get('durationMs'), scale=1000)
view_count = int_or_none(video.get('views'))
```
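The date, duration and count helpers mentioned above follow the same pattern. A hedged sketch (the `meta` dict and its keys are hypothetical):
```python
upload_date = unified_strdate(meta.get('published'))  # normalizes many date formats to YYYYMMDD
timestamp = unified_timestamp(meta.get('published'))  # same source, but as a UNIX timestamp
duration = parse_duration(meta.get('length'))  # e.g. '1:23:45' -> 5025.0
view_count = parse_count(meta.get('views'))  # e.g. '1.2M' -> 1200000
```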
# My pull request is labeled pending-fixes
The `pending-fixes` label is added when changes are requested on a PR. Once the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your pull request and ask them to re-review and remove the label.

View File

@@ -214,56 +214,3 @@ pycabbage
regarten
Ronnnny
schn0sch
s0u1h
MrRawes
cffswb
danielyli
1-Byte
mehq
dzek69
aaearon
panatexxa
kmark
un-def
goggle
Soebb
Fam0r
bohwaz
dodrian
vvto33
ca-za
connercsbn
diegorodriguezv
ekangmonyet
elyse0
evansp
GiedriusS
HE7086
JordanWeatherby
m4tu4g
MarwenDallel
nevack
putnam
rand-net
vertan
Wikidepia
Yipten
moench-tegeder
christoph-heinrich
HobbyistDev
LunarFang416
sbor23
aurelg
adamanldo
gamer191
vkorablin
Burve
mnn
ZhymabekRoman
mozbugbox
aejdl
ping
sqrtNOT
bubbleguuum
darkxex
miseran

View File

@@ -11,388 +11,6 @@
-->
### 2022.06.22
* [**Deprecate support for Python 3.6**](https://github.com/yt-dlp/yt-dlp/issues/3764#issuecomment-1154051119)
* **Add option `--download-sections` to download video partially**
* Chapter regex and time ranges are accepted (Eg: `--download-sections *1:10-2:20`)
* Add option `--alias`
* Add option `--lazy-playlist` to process entries as they are received
* Add option `--retry-sleep`
* Add slicing notation to `--playlist-items`
* Adds support for negative indices and step
* Add `-I` as alias for `--playlist-index`
* Makes `--playlist-start`, `--playlist-end`, `--playlist-reverse`, `--no-playlist-reverse` redundant
* `--config-location -` to provide options interactively
* [build] Add Linux standalone builds
* [update] Self-restart after update
* Merge youtube-dl: Upto [commit/8a158a9](https://github.com/ytdl-org/youtube-dl/commit/8a158a9)
* Add `--no-update`
* Allow extractors to specify section_start/end for clips
* Do not print progress to `stderr` with `-q`
* Ensure pre-processor errors do not block video download
* Fix `--simulate --max-downloads`
* Improve error handling of bad config files
* Return an error code if update fails
* Fix bug in [3a408f9](https://github.com/yt-dlp/yt-dlp/commit/3a408f9d199127ca2626359e21a866a09ab236b3)
* [ExtractAudio] Allow conditional conversion
* [ModifyChapters] Fix repeated removal of small segments
* [ThumbnailsConvertor] Allow conditional conversion
* [cookies] Detect profiles for cygwin/BSD by [moench-tegeder](https://github.com/moench-tegeder)
* [dash] Show fragment count with `--live-from-start` by [flashdagger](https://github.com/flashdagger)
* [extractor] Add `_search_json` by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [extractor] Add `default` parameter to `_search_json` by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [extractor] Add dev option `--load-pages`
* [extractor] Handle `json_ld` with multiple `@type`s
* [extractor] Import `_ALL_CLASSES` lazily
* [extractor] Recognize `src` attribute from HTML5 media elements by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/generic] Revert e6ae51c123897927eb3c9899923d8ffd31c7f85d
* [f4m] Bugfix
* [ffmpeg] Check version lazily
* [jsinterp] Some optimizations and refactoring by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
* [utils] Improve performance using `functools.cache`
* [utils] Send HTTP/1.1 ALPN extension by [coletdjnz](https://github.com/coletdjnz)
* [utils] `ExtractorError`: Fix `exc_info`
* [utils] `ISO3166Utils`: Add `EU` and `AP`
* [utils] `Popen`: Refactor to use contextmanager
* [utils] `locked_file`: Fix for PyPy on Windows
* [update] Expose more functionality to API
* [update] Use `.git` folder to distinguish `source`/`unknown`
* [compat] Add `functools.cached_property`
* [test] Fix `FakeYDL` signatures by [coletdjnz](https://github.com/coletdjnz)
* [docs] Improvements
* [cleanup, ExtractAudio] Refactor
* [cleanup, downloader] Refactor `report_progress`
* [cleanup, extractor] Refactor `_download_...` methods
* [cleanup, extractor] Rename `extractors.py` to `_extractors.py`
* [cleanup, utils] Don't use kwargs for `format_field`
* [cleanup, build] Refactor
* [cleanup, docs] Re-indent "Usage and Options" section
* [cleanup] Deprecate `YoutubeDL.parse_outtmpl`
* [cleanup] Misc fixes and cleanup by [Lesmiscore](https://github.com/Lesmiscore), [MrRawes](https://github.com/MrRawes), [christoph-heinrich](https://github.com/christoph-heinrich), [flashdagger](https://github.com/flashdagger), [gamer191](https://github.com/gamer191), [kwconder](https://github.com/kwconder), [pukkandan](https://github.com/pukkandan)
* [extractor/DailyWire] Add extractors by [HobbyistDev](https://github.com/HobbyistDev), [pukkandan](https://github.com/pukkandan)
* [extractor/fourzerostudio] Add extractors by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/GoogleDrive] Add folder extractor by [evansp](https://github.com/evansp), [pukkandan](https://github.com/pukkandan)
* [extractor/MirrorCoUK] Add extractor by [LunarFang416](https://github.com/LunarFang416), [pukkandan](https://github.com/pukkandan)
* [extractor/atscaleconfevent] Add extractor by [Ashish0804](https://github.com/Ashish0804)
* [extractor/freetv] Add extractor by [elyse0](https://github.com/elyse0)
* [extractor/ixigua] Add Extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/kicker.de] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/netverse] Add extractors by [HobbyistDev](https://github.com/HobbyistDev), [pukkandan](https://github.com/pukkandan)
* [extractor/playsuisse] Add extractor by [pukkandan](https://github.com/pukkandan), [sbor23](https://github.com/sbor23)
* [extractor/substack] Add extractor by [elyse0](https://github.com/elyse0)
* [extractor/youtube] **Support downloading clips**
* [extractor/youtube] Add `innertube_host` and `innertube_key` extractor args by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Add warning for PostLiveDvr
* [extractor/youtube] Bring back `_extract_chapters_from_description`
* [extractor/youtube] Extract `comment_count` from webpage
* [extractor/youtube] Fix `:ytnotifications` extractor by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Fix initial player response extraction by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [extractor/youtube] Fix live chat for videos with content warning by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Make signature extraction non-fatal
* [extractor/youtube:tab] Detect `videoRenderer` in `_post_thread_continuation_entries`
* [extractor/BiliIntl] Fix metadata extraction
* [extractor/BiliIntl] Fix subtitle extraction by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/FranceCulture] Fix extractor by [aurelg](https://github.com/aurelg), [pukkandan](https://github.com/pukkandan)
* [extractor/PokemonSoundLibrary] Remove extractor by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/StreamCZ] Fix extractor by [adamanldo](https://github.com/adamanldo), [dirkf](https://github.com/dirkf)
* [extractor/WatchESPN] Support free videos and BAM_DTC by [ischmidt20](https://github.com/ischmidt20)
* [extractor/animelab] Remove extractor by [gamer191](https://github.com/gamer191)
* [extractor/bloomberg] Change playback endpoint by [m4tu4g](https://github.com/m4tu4g)
* [extractor/ccc] Extract view_count by [vkorablin](https://github.com/vkorablin)
* [extractor/crunchyroll:beta] Fix extractor after API change by [Burve](https://github.com/Burve), [tejing1](https://github.com/tejing1)
* [extractor/curiositystream] Get `auth_token` from cookie by [mnn](https://github.com/mnn)
* [extractor/digitalconcerthall] Fix extractor by [ZhymabekRoman](https://github.com/ZhymabekRoman)
* [extractor/dropbox] Extract the correct `mountComponent`
* [extractor/dropout] Login is not mandatory
* [extractor/duboku] Fix for hostname change by [mozbugbox](https://github.com/mozbugbox)
* [extractor/espn] Add `WatchESPN` extractor by [ischmidt20](https://github.com/ischmidt20), [pukkandan](https://github.com/pukkandan)
* [extractor/expressen] Fix extractor by [aejdl](https://github.com/aejdl)
* [extractor/foxnews] Update embed extraction by [elyse0](https://github.com/elyse0)
* [extractor/ina] Fix extractor by [elyse0](https://github.com/elyse0)
* [extractor/iwara:user] Make paging better by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/jwplatform] Look for `data-video-jw-id`
* [extractor/lbry] Update livestream API by [flashdagger](https://github.com/flashdagger)
* [extractor/mediaset] Improve `_VALID_URL`
* [extractor/naver] Add `navernow` extractor by [ping](https://github.com/ping)
* [extractor/niconico:series] Fix extractor by [sqrtNOT](https://github.com/sqrtNOT)
* [extractor/npr] Use stream url from json-ld by [r5d](https://github.com/r5d)
* [extractor/pornhub] Extract `uploader_id` field by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/radiofrance] Add more radios by [bubbleguuum](https://github.com/bubbleguuum)
* [extractor/rumble] Detect JS embed
* [extractor/rumble] Extract subtitles by [fstirlitz](https://github.com/fstirlitz)
* [extractor/southpark] Add `southpark.lat` extractor by [darkxex](https://github.com/darkxex)
* [extractor/spotify:show] Fix extractor
* [extractor/tiktok] Detect embeds
* [extractor/tiktok] Extract `SIGI_STATE` by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan), [sulyi](https://github.com/sulyi)
* [extractor/tver] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/vevo] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/yahoo:gyao] Fix extractor
* [extractor/zattoo] Fix live streams by [miseran](https://github.com/miseran)
* [extractor/zdf] Improve format sorting by [elyse0](https://github.com/elyse0)
### 2022.05.18
* Add support for SSL client certificate authentication by [coletdjnz](https://github.com/coletdjnz), [dirkf](https://github.com/dirkf)
* Adds `--client-certificate`, `--client-certificate-key`, `--client-certificate-password`
* Add `--match-filter -` to interactively ask for each video
* `--max-downloads` should obey `--break-per-input`
* Allow use of weaker ciphers with `--legacy-server-connect`
* Don't imply `-s` for later stages of `-O`
* Fix `--date today`
* Fix `--skip-unavailable-fragments`
* Fix color in `-q -F`
* Fix redirect HTTP method handling by [coletdjnz](https://github.com/coletdjnz)
* Improve `--clean-infojson`
* Remove warning for videos with an empty title
* Run `FFmpegFixupM3u8PP` for live-streams if needed
* Show name of downloader in verbose log
* [cookies] Allow `cookiefile` to be a text stream
* [cookies] Report progress when importing cookies
* [downloader/ffmpeg] Specify headers for each URL by [elyse0](https://github.com/elyse0)
* [fragment] Do not change chunk-size when `--test`
* [fragment] Make single thread download work for `--live-from-start` by [Lesmiscore](https://github.com/Lesmiscore)
* [hls] Fix `byte_range` for `EXT-X-MAP` fragment by [fstirlitz](https://github.com/fstirlitz)
* [http] Fix retrying on read timeout by [coletdjnz](https://github.com/coletdjnz)
* [ffmpeg] Fix features detection
* [EmbedSubtitle] Enable for more video extensions
* [EmbedThumbnail] Disable thumbnail conversion for mkv by [evansp](https://github.com/evansp)
* [EmbedThumbnail] Do not obey `-k`
* [EmbedThumbnail] Do not remove id3v1 tags
* [FFmpegMetadata] Remove `\0` from metadata
* [FFmpegMetadata] Remove filename from attached info-json
* [FixupM3u8] Obey `--hls-prefer-mpegts`
* [Sponsorblock] Don't crash when duration is unknown
* [XAttrMetadata] Refactor and document dependencies
* [extractor] Document netrc machines
* [extractor] Update `manifest_url`s after redirect by [elyse0](https://github.com/elyse0)
* [extractor] Update dash `manifest_url` after redirects by [elyse0](https://github.com/elyse0)
* [extractor] Use `classmethod`/`property` where possible
* [generic] Refactor `_extract_rss`
* [utils] `is_html`: Handle double BOM
* [utils] `locked_file`: Ignore illegal seek on `truncate` by [jakeogh](https://github.com/jakeogh)
* [utils] `sanitize_path`: Fix when path is empty string
* [utils] `write_string`: Workaround newline issue in `conhost`
* [utils] `certifi`: Make sure the pem file exists
* [utils] Fix `WebSocketsWrapper`
* [utils] `locked_file`: Do not give executable bits for newly created files by [Lesmiscore](https://github.com/Lesmiscore)
* [utils] `YoutubeDLCookieJar`: Detect and reject JSON file by [Lesmiscore](https://github.com/Lesmiscore)
* [test] Convert warnings into errors and fix some existing warnings by [fstirlitz](https://github.com/fstirlitz)
* [dependencies] Create module with all dependency imports
* [compat] Split into sub-modules by [fstirlitz](https://github.com/fstirlitz), [pukkandan](https://github.com/pukkandan)
* [compat] Implement `compat.imghdr`
* [build] Add `make uninstall` by [MrRawes](https://github.com/MrRawes)
* [build] Avoid use of `install -D`
* [build] Fix `Makefile` by [putnam](https://github.com/putnam)
* [build] Fix `--onedir` on macOS
* [build] Add more test-runners
* [cleanup] Deprecate some compat vars by [fstirlitz](https://github.com/fstirlitz), [pukkandan](https://github.com/pukkandan)
* [cleanup] Remove unused code paths, extractors, scripts and tests by [fstirlitz](https://github.com/fstirlitz)
* [cleanup] Upgrade syntax (`pyupgrade`) and sort imports (`isort`)
* [cleanup, docs, build] Misc fixes
* [BilibiliLive] Add extractor by [HE7086](https://github.com/HE7086), [pukkandan](https://github.com/pukkandan)
* [Fifa] Add Extractor by [Bricio](https://github.com/Bricio)
* [goodgame] Add extractor by [nevack](https://github.com/nevack)
* [gronkh] Add playlist extractors by [hatienl0i261299](https://github.com/hatienl0i261299)
* [icareus] Add extractor by [tpikonen](https://github.com/tpikonen), [pukkandan](https://github.com/pukkandan)
* [iwara] Add playlist extractors by [i6t](https://github.com/i6t)
* [Likee] Add extractor by [hatienl0i261299](https://github.com/hatienl0i261299)
* [masters] Add extractor by [m4tu4g](https://github.com/m4tu4g)
* [nebula] Add support for subscriptions by [hheimbuerger](https://github.com/hheimbuerger)
* [Podchaser] Add extractors by [connercsbn](https://github.com/connercsbn)
* [rokfin:search] Add extractor by [P-reducible](https://github.com/P-reducible), [pukkandan](https://github.com/pukkandan)
* [youtube] Add `:ytnotifications` extractor by [krichbanana](https://github.com/krichbanana)
* [youtube] Add YoutubeStoriesIE (`ytstories:<channel UCID>`) by [coletdjnz](https://github.com/coletdjnz)
* [ZingMp3] Add chart and user extractors by [hatienl0i261299](https://github.com/hatienl0i261299)
* [adn] Update AES key by [elyse0](https://github.com/elyse0)
* [adobepass] Allow cookies for authenticating MSO
* [bandcamp] Exclude merch links by [Yipten](https://github.com/Yipten)
* [chingari] Fix archiving and tests
* [DRTV] Improve `_VALID_URL` by [vertan](https://github.com/vertan)
* [facebook] Improve thumbnail extraction by [Wikidepia](https://github.com/Wikidepia)
* [fc2] Stop heartbeating once FFmpeg finishes by [Lesmiscore](https://github.com/Lesmiscore)
* [Gofile] Fix extraction and support password-protected links by [mehq](https://github.com/mehq)
* [hotstar, cleanup] Refactor extractors
* [InfoQ] Don't fail on missing audio format by [evansp](https://github.com/evansp)
* [Jamendo] Extract more metadata by [evansp](https://github.com/evansp)
* [kaltura] Update API calls by [flashdagger](https://github.com/flashdagger)
* [KhanAcademy] Fix extractor by [rand-net](https://github.com/rand-net)
* [LCI] Fix extractor by [MarwenDallel](https://github.com/MarwenDallel)
* [lrt] Support livestreams by [GiedriusS](https://github.com/GiedriusS)
* [niconico] Set `expected_protocol` to a public field
* [Niconico] Support 2FA by [ekangmonyet](https://github.com/ekangmonyet)
* [Olympics] Fix format extension
* [openrec:movie] Enable fallback for /movie/ URLs
* [PearVideo] Add fallback for formats by [hatienl0i261299](https://github.com/hatienl0i261299)
* [radiko] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
* [rai] Add `release_year`
* [reddit] Prevent infinite loop
* [rokfin] Implement login by [P-reducible](https://github.com/P-reducible), [pukkandan](https://github.com/pukkandan)
* [ruutu] Support hs.fi embeds by [tpikonen](https://github.com/tpikonen), [pukkandan](https://github.com/pukkandan)
* [spotify] Detect iframe embeds by [fstirlitz](https://github.com/fstirlitz)
* [telegram] Fix metadata extraction
* [tmz, cleanup] Update tests by [diegorodriguezv](https://github.com/diegorodriguezv)
* [toggo] Fix `_VALID_URL` by [ca-za](https://github.com/ca-za)
* [trovo] Update to new API by [nyuszika7h](https://github.com/nyuszika7h)
* [TVer] Improve extraction by [Lesmiscore](https://github.com/Lesmiscore)
* [twitcasting] Pass headers for each formats by [Lesmiscore](https://github.com/Lesmiscore)
* [VideocampusSachsen] Improve extractor by [FestplattenSchnitzel](https://github.com/FestplattenSchnitzel)
* [vimeo] Fix extractors
* [wat] Fix extraction of multi-language videos and subtitles by [elyse0](https://github.com/elyse0)
* [wistia] Fix `_VALID_URL` by [dirkf](https://github.com/dirkf)
* [youtube, cleanup] Minor refactoring by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [youtube] Added piped instance urls by [JordanWeatherby](https://github.com/JordanWeatherby)
* [youtube] Deprioritize auto-generated thumbnails
* [youtube] Deprioritize format 22 (often damaged)
* [youtube] Fix episode metadata extraction
* [zee5] Fix extractor by [Ashish0804](https://github.com/Ashish0804)
* [zingmp3, cleanup] Refactor extractors
### 2022.04.08
* Use certificates from `certifi` if installed by [coletdjnz](https://github.com/coletdjnz)
* Treat multiple `--match-filters` as OR
* File locking improvements:
* Do not lock downloading file on Windows
* Do not prevent download if locking is unsupported
* Do not truncate files before locking by [jakeogh](https://github.com/jakeogh), [pukkandan](https://github.com/pukkandan)
* Fix non-blocking non-exclusive lock
* De-prioritize automatic-subtitles when no `--sub-lang` is given
* Exit after `--dump-user-agent`
* Fallback to video-only format when selecting by extension
* Fix `--abort-on-error` for subtitles
* Fix `--no-overwrite` for playlist infojson
* Fix `--print` with `--ignore-no-formats` when url is `None` by [flashdagger](https://github.com/flashdagger)
* Fix `--sleep-interval`
* Fix `--throttled-rate`
* Fix `autonumber`
* Fix case of `http_headers`
* Fix filepath sanitization in `--print-to-file`
* Handle float in `--wait-for-video`
* Ignore `mhtml` formats from `-f mergeall`
* Ignore format-specific fields in initial pass of `--match-filter`
* Protect stdout from unexpected progress and console-title
* Remove `Accept-Encoding` header from `std_headers` by [coletdjnz](https://github.com/coletdjnz)
* Remove incorrect warning for `--dateafter`
* Show warning when all media formats have DRM
* [downloader] Fix invocation of `HttpieFD`
* [http] Fix #3215
* [http] Reject broken range before request by [Lesmiscore](https://github.com/Lesmiscore), [Jules-A](https://github.com/Jules-A), [pukkandan](https://github.com/pukkandan)
* [fragment] Read downloaded fragments only when needed by [Lesmiscore](https://github.com/Lesmiscore)
* [http] Retry on more errors by [coletdjnz](https://github.com/coletdjnz)
* [mhtml] Fix fragments with absolute urls by [coletdjnz](https://github.com/coletdjnz)
* [extractor] Add `_perform_login` function
* [extractor] Allow control characters inside json
* [extractor] Support merging subtitles with data by [coletdjnz](https://github.com/coletdjnz)
* [generic] Extract subtitles from video.js by [Lesmiscore](https://github.com/Lesmiscore)
* [ffmpeg] Cache version data
* [FFmpegConcat] Ensure final directory exists
* [FfmpegMetadata] Write id3v1 tags
* [FFmpegVideoConvertor] Add more formats to `--remux-video`
* [FFmpegVideoConvertor] Ensure all streams are copied
* [MetadataParser] Validate outtmpl early
* [outtmpl] Fix replacement/default when used with alternate
* [outtmpl] Limit changes during sanitization
* [phantomjs] Fix bug
* [test] Add `test_locked_file`
* [utils] `format_decimal_suffix`: Fix for very large numbers by [s0u1h](https://github.com/s0u1h)
* [utils] `traverse_obj`: Allow filtering by value
* [utils] Add `filter_dict`, `get_first`, `try_call`
* [utils] ExtractorError: Fix for older python versions
* [utils] WebSocketsWrapper: Allow omitting `__enter__` invocation by [Lesmiscore](https://github.com/Lesmiscore)
* [docs] Add an `.editorconfig` file by [fstirlitz](https://github.com/fstirlitz)
* [docs] Clarify the exact `BSD` license of dependencies by [MrRawes](https://github.com/MrRawes)
* [docs] Minor improvements by [pukkandan](https://github.com/pukkandan), [cffswb](https://github.com/cffswb), [danielyli](https://github.com/danielyli)
* [docs] Remove readthedocs
* [build] Add `requirements.txt` to pip distributions
* [cleanup, postprocessor] Create `_download_json`
* [cleanup, vimeo] Fix tests
* [cleanup] Misc fixes and minor cleanup
* [cleanup] Use `_html_extract_title`
* [AfreecaTV] Add `AfreecaTVUserIE` by [hatienl0i261299](https://github.com/hatienl0i261299)
* [arte] Add `format_note` to m3u8 formats
* [azmedien] Add TVO Online to supported hosts by [1-Byte](https://github.com/1-Byte)
* [BanBye] Add extractor by [mehq](https://github.com/mehq)
* [bilibili] Fix extraction of title with quotes by [dzek69](https://github.com/dzek69)
* [Craftsy] Add extractor by [Bricio](https://github.com/Bricio)
* [Cybrary] Add extractor by [aaearon](https://github.com/aaearon)
* [Huya] Add extractor by [hatienl0i261299](https://github.com/hatienl0i261299)
* [ITProTV] Add extractor by [aaearon](https://github.com/aaearon)
* [Jable] Add extractors by [mehq](https://github.com/mehq)
* [LastFM] Add extractors by [mehq](https://github.com/mehq)
* [Moviepilot] Add extractor by [panatexxa](https://github.com/panatexxa)
* [panopto] Add extractors by [coletdjnz](https://github.com/coletdjnz), [kmark](https://github.com/kmark)
* [PokemonSoundLibrary] Add extractor by [Lesmiscore](https://github.com/Lesmiscore)
* [WasdTV] Add extractor by [un-def](https://github.com/un-def), [hatienl0i261299](https://github.com/hatienl0i261299)
* [adobepass] Fix Suddenlink MSO by [CplPwnies](https://github.com/CplPwnies)
* [afreecatv] Match new vod url by [wlritchi](https://github.com/wlritchi)
* [AZMedien] Support `tv.telezueri.ch` by [goggle](https://github.com/goggle)
* [BiliIntl] Support user-generated videos by [wlritchi](https://github.com/wlritchi)
* [BRMediathek] Fix VALID_URL
* [crunchyroll:playlist] Implement beta API by [tejing1](https://github.com/tejing1)
* [crunchyroll] Fix inheritance
* [daftsex] Fix extractor by [Soebb](https://github.com/Soebb)
* [dailymotion] Support `geo.dailymotion.com` by [hatienl0i261299](https://github.com/hatienl0i261299)
* [ellentube] Extract subtitles from manifest
* [elonet] Rewrite extractor by [Fam0r](https://github.com/Fam0r), [pukkandan](https://github.com/pukkandan)
* [fptplay] Fix metadata extraction by [hatienl0i261299](https://github.com/hatienl0i261299)
* [FranceCulture] Support playlists by [bohwaz](https://github.com/bohwaz)
* [go, viu] Extract subtitles from the m3u8 manifest by [fstirlitz](https://github.com/fstirlitz)
* [Imdb] Improve extractor by [hatienl0i261299](https://github.com/hatienl0i261299)
* [MangoTV] Improve extractor by [hatienl0i261299](https://github.com/hatienl0i261299)
* [Nebula] Fix bug in 52efa4b31200119adaa8acf33e50b84fcb6948f0
* [niconico] Fix extraction of thumbnails and uploader (#3266)
* [niconico] Rewrite NiconicoIE by [Lesmiscore](https://github.com/Lesmiscore)
* [nitter] Minor fixes and update instance list by [foghawk](https://github.com/foghawk)
* [NRK] Extract timestamp by [hatienl0i261299](https://github.com/hatienl0i261299)
* [openrec] Download archived livestreams by [Lesmiscore](https://github.com/Lesmiscore)
* [openrec] Refactor extractors by [Lesmiscore](https://github.com/Lesmiscore)
* [panopto] Improve subtitle extraction and support slides by [coletdjnz](https://github.com/coletdjnz)
* [ParamountPlus, CBS] Change VALID_URL by [Sipherdrakon](https://github.com/Sipherdrakon)
* [ParamountPlusSeries] Support multiple pages by [dodrian](https://github.com/dodrian)
* [Piapro] Extract description with break lines by [Lesmiscore](https://github.com/Lesmiscore)
* [rai] Fix extraction of http formats by [nixxo](https://github.com/nixxo)
* [rumble] unescape title
* [RUTV] Fix format sorting by [Lesmiscore](https://github.com/Lesmiscore)
* [ruutu] Detect embeds by [tpikonen](https://github.com/tpikonen)
* [tenplay] Improve extractor by [aarubui](https://github.com/aarubui)
* [TikTok] Fix URLs with user id by [hatienl0i261299](https://github.com/hatienl0i261299)
* [TikTokVM] Fix redirect to user URL
* [TVer] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
* [TVer] Support landing page by [vvto33](https://github.com/vvto33)
* [twitcasting] Don't return multi_video for archive with single hls manifest by [Lesmiscore](https://github.com/Lesmiscore)
* [veo] Fix `_VALID_URL`
* [Veo] Fix extractor by [i6t](https://github.com/i6t)
* [viki] Don't attempt to modify URLs with signature by [nyuszika7h](https://github.com/nyuszika7h)
* [viu] Fix bypass for preview by [zackmark29](https://github.com/zackmark29)
* [viu] Fixed extractor by [zackmark29](https://github.com/zackmark29), [pukkandan](https://github.com/pukkandan)
* [web.archive:youtube] Make CDX API requests non-fatal by [coletdjnz](https://github.com/coletdjnz)
* [wget] Fix proxy by [kikuyan](https://github.com/kikuyan), [coletdjnz](https://github.com/coletdjnz)
* [xnxx] Add `xnxx3.com` by [rozari0](https://github.com/rozari0)
* [youtube] **Add new age-gate bypass** by [zerodytrash](https://github.com/zerodytrash), [pukkandan](https://github.com/pukkandan)
* [youtube] Add extractor-arg to skip auto-translated subs
* [youtube] Avoid false positives when detecting damaged formats
* [youtube] Detect DRM better by [shirt](https://github.com/shirt-dev)
* [youtube] Fix auto-translated automatic captions
* [youtube] Fix pagination of `membership` tab
* [youtube] Fix uploader for collaborative playlists by [coletdjnz](https://github.com/coletdjnz)
* [youtube] Improve video upload date handling by [coletdjnz](https://github.com/coletdjnz)
* [youtube:api] Prefer minified JSON response by [coletdjnz](https://github.com/coletdjnz)
* [youtube:search] Support hashtag entries by [coletdjnz](https://github.com/coletdjnz)
* [youtube:tab] Fix duration extraction for shorts by [coletdjnz](https://github.com/coletdjnz)
* [youtube:tab] Minor improvements
* [youtube:tab] Return shorts url if video is a short by [coletdjnz](https://github.com/coletdjnz)
* [Zattoo] Fix extractors by [goggle](https://github.com/goggle)
* [Zingmp3] Fix signature by [hatienl0i261299](https://github.com/hatienl0i261299)
### 2022.03.08.1
* [cleanup] Refactor `__init__.py`
* [build] Fix bug
### 2022.03.08
* Merge youtube-dl: Upto [commit/6508688](https://github.com/ytdl-org/youtube-dl/commit/6508688e88c83bb811653083db9351702cd39a6a) (except NDR)
@@ -411,14 +29,14 @@
* Set `webpage_url_...` from `webpage_url` and not input URL
* Tolerate failure to `--write-link` due to unknown URL
* [aria2c] Add `--http-accept-gzip=true`
* [build] Update pyinstaller to 4.10 by [shirt](https://github.com/shirt-dev)
* [cookies] Update MacOS12 `Cookies.binarycookies` location by [mdpauley](https://github.com/mdpauley)
* [devscripts] Improve `prepare_manpage`
* [downloader] Do not use aria2c for non-native `m3u8`
* [downloader] Obey `--file-access-retries` when deleting/renaming by [ehoogeveen-medweb](https://github.com/ehoogeveen-medweb)
* [extractor] Allow `http_headers` to be specified for `thumbnails`
* [extractor] Extract subtitles from manifests for vimeo, globo, kaltura, svt by [fstirlitz](https://github.com/fstirlitz)
* [extractor] Fix for manifests without period duration by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
* [extractor] Support `--mark-watched` without `_NETRC_MACHINE` by [coletdjnz](https://github.com/coletdjnz)
* [FFmpegConcat] Abort on `--simulate`
* [FormatSort] Consider `acodec`=`ogg` as `vorbis`
@@ -453,9 +71,9 @@
* [nfb] Add extractor by [ofkz](https://github.com/ofkz)
* [niconico] Add playlist extractors and refactor by [Lesmiscore](https://github.com/Lesmiscore)
* [peekvids] Add extractor by [schn0sch](https://github.com/schn0sch)
* [piapro] Add extractor by [pycabbage](https://github.com/pycabbage), [Lesmiscore](https://github.com/Lesmiscore)
* [rokfin] Add extractor by [P-reducible](https://github.com/P-reducible), [pukkandan](https://github.com/pukkandan)
* [rokfin] Add stack and channel extractors by [P-reducible](https://github.com/P-reducible), [pukkandan](https://github.com/pukkandan)
* [ruv.is] Add extractor by [iw0nderhow](https://github.com/iw0nderhow)
* [telegram] Add extractor by [hatienl0i261299](https://github.com/hatienl0i261299)
* [VideocampusSachsen] Add extractors by [FestplattenSchnitzel](https://github.com/FestplattenSchnitzel)
@@ -1024,7 +642,7 @@
* [build] Improvements
* Build standalone MacOS packages by [smplayer-dev](https://github.com/smplayer-dev)
* Release windows exe built with `py2exe`
* Enable lazy-extractors in releases
* Set env var `YTDLP_NO_LAZY_EXTRACTORS` to forcefully disable this (experimental)
* Clean up error reporting in update
* Refactor `pyinst.py`, misc cleanup and improve docs
@@ -1277,7 +895,7 @@
* [build] Automate more of the release process by [animelover1984](https://github.com/animelover1984), [pukkandan](https://github.com/pukkandan)
* [build] Fix sha256 by [nihil-admirari](https://github.com/nihil-admirari)
* [build] Bring back brew taps by [nao20010128nao](https://github.com/nao20010128nao)
* [build] Provide `--onedir` zip for windows by [pukkandan](https://github.com/pukkandan)
* [cleanup,docs] Add deprecation warning in docs for some counter intuitive behaviour
* [cleanup] Fix line endings for `nebula.py` by [glenn-slayden](https://github.com/glenn-slayden)
* [cleanup] Improve `make clean-test` by [sulyi](https://github.com/sulyi)
@@ -1632,7 +1250,7 @@
* [youtube] Non-fatal alert reporting for unavailable videos page by [coletdjnz](https://github.com/coletdjnz)
* [twitcasting] Websocket support by [nao20010128nao](https://github.com/nao20010128nao)
* [mediasite] Extract slides by [fstirlitz](https://github.com/fstirlitz)
* [funimation] Extract subtitles
* [pornhub] Extract `cast`
* [hotstar] Use server time for authentication instead of local time
* [EmbedThumbnail] Fix for already downloaded thumbnail
@@ -1728,7 +1346,7 @@
### 2021.05.20
* **Youtube improvements**:
* Support youtube music `MP`, `VL` and `browse` pages
* Extract more formats for youtube music by [craftingmod](https://github.com/craftingmod), [coletdjnz](https://github.com/coletdjnz) and [pukkandan](https://github.com/pukkandan)
* Extract multiple subtitles in same language by [pukkandan](https://github.com/pukkandan) and [tpikonen](https://github.com/tpikonen)
@@ -2270,7 +1888,7 @@
* **Format Sort:** Added `--format-sort` (`-S`), `--format-sort-force` (`--S-force`) - See [Sorting Formats](README.md#sorting-formats) for details
* **Format Selection:** See [Format Selection](README.md#format-selection) for details
* New format selectors: `best*`, `worst*`, `bestvideo*`, `bestaudio*`, `worstvideo*`, `worstaudio*`
* Changed video format sorting to show video only files and video+audio files together
* Added `--video-multistreams`, `--no-video-multistreams`, `--audio-multistreams`, `--no-audio-multistreams`
* Added `b`,`w`,`v`,`a` as alias for `best`, `worst`, `video` and `audio` respectively
* Shortcut Options: Added `--write-link`, `--write-url-link`, `--write-webloc-link`, `--write-desktop-link` by [h-h-h-h](https://github.com/h-h-h-h) - See [Internet Shortcut Options](README.md#internet-shortcut-options) for details

View File

@@ -29,7 +29,6 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
* Added support for downloading YoutubeWebArchive videos
* Added support for new websites MainStreaming, PRX, nzherald, etc

View File

@@ -5,6 +5,5 @@ include README.md
include completions/*/*
include supportedsites.md
include yt-dlp.1
include requirements.txt
recursive-include devscripts *
recursive-include test *

View File

@@ -9,8 +9,7 @@ tar: yt-dlp.tar.gz
# Keep this list in sync with MANIFEST.in
# intended use: when building a source distribution,
# make pypi-files && python setup.py sdist
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
completions yt-dlp.1 requirements.txt setup.cfg devscripts/* test/*
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites completions yt-dlp.1 devscripts/* test/*
.PHONY: all clean install test tar pypi-files completions ot offlinetest codetest supportedsites
@@ -23,9 +22,7 @@ clean-dist:
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
clean-cache:
find . \( \
-type d -name .pytest_cache -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \
\) -prune -exec rm -rf {} \;
find . \( -name "*.pyc" -o -name "*.class" \) -delete
completion-bash: completions/bash/yt-dlp
completion-fish: completions/fish/yt-dlp.fish
@@ -46,23 +43,11 @@ SYSCONFDIR = $(shell if [ $(PREFIX) = /usr -o $(PREFIX) = /usr/local ]; then ech
MARKDOWN = $(shell if [ `pandoc -v | head -n1 | cut -d" " -f2 | head -c1` = "2" ]; then echo markdown-smart; else echo markdown; fi)
install: lazy-extractors yt-dlp yt-dlp.1 completions
mkdir -p $(DESTDIR)$(BINDIR)
install -m755 yt-dlp $(DESTDIR)$(BINDIR)/yt-dlp
mkdir -p $(DESTDIR)$(MANDIR)/man1
install -m644 yt-dlp.1 $(DESTDIR)$(MANDIR)/man1/yt-dlp.1
mkdir -p $(DESTDIR)$(SHAREDIR)/bash-completion/completions
install -m644 completions/bash/yt-dlp $(DESTDIR)$(SHAREDIR)/bash-completion/completions/yt-dlp
mkdir -p $(DESTDIR)$(SHAREDIR)/zsh/site-functions
install -m644 completions/zsh/_yt-dlp $(DESTDIR)$(SHAREDIR)/zsh/site-functions/_yt-dlp
mkdir -p $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d
install -m644 completions/fish/yt-dlp.fish $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
uninstall:
rm -f $(DESTDIR)$(BINDIR)/yt-dlp
rm -f $(DESTDIR)$(MANDIR)/man1/yt-dlp.1
rm -f $(DESTDIR)$(SHAREDIR)/bash-completion/completions/yt-dlp
rm -f $(DESTDIR)$(SHAREDIR)/zsh/site-functions/_yt-dlp
rm -f $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
install -Dm755 yt-dlp $(DESTDIR)$(BINDIR)/yt-dlp
install -Dm644 yt-dlp.1 $(DESTDIR)$(MANDIR)/man1/yt-dlp.1
install -Dm644 completions/bash/yt-dlp $(DESTDIR)$(SHAREDIR)/bash-completion/completions/yt-dlp
install -Dm644 completions/zsh/_yt-dlp $(DESTDIR)$(SHAREDIR)/zsh/site-functions/_yt-dlp
install -Dm644 completions/fish/yt-dlp.fish $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
codetest:
flake8 .
@@ -74,28 +59,25 @@ test:
offlinetest: codetest
$(PYTHON) -m pytest -k "not download"
# XXX: This is hard to maintain
CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat \
yt_dlp/extractor/anvato_token_generator
yt-dlp: yt_dlp/*.py yt_dlp/*/*.py
mkdir -p zip
for d in $(CODE_FOLDERS) ; do \
for d in yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor ; do \
mkdir -p zip/$$d ;\
cp -pPR $$d/*.py zip/$$d/ ;\
done
touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py zip/yt_dlp/*/*/*.py
touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py
mv zip/yt_dlp/__main__.py zip/
cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py yt_dlp/*/*/*.py __main__.py
cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py __main__.py
rm -rf zip
echo '#!$(PYTHON)' > yt-dlp
cat yt-dlp.zip >> yt-dlp
rm yt-dlp.zip
chmod a+x yt-dlp
README.md: yt_dlp/*.py yt_dlp/*/*.py devscripts/make_readme.py
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py
README.md: yt_dlp/*.py yt_dlp/*/*.py
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --help | $(PYTHON) devscripts/make_readme.py
CONTRIBUTING.md: README.md devscripts/make_contributing.py
CONTRIBUTING.md: README.md
$(PYTHON) devscripts/make_contributing.py README.md CONTRIBUTING.md
issuetemplates: devscripts/make_issue_template.py .github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml .github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml .github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml .github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml .github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml yt_dlp/version.py
@@ -112,7 +94,7 @@ supportedsites:
README.txt: README.md
pandoc -f $(MARKDOWN) -t plain README.md -o README.txt
yt-dlp.1: README.md devscripts/prepare_manpage.py
yt-dlp.1: README.md
$(PYTHON) devscripts/prepare_manpage.py yt-dlp.1.temp.md
pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
rm -f yt-dlp.1.temp.md
@@ -129,7 +111,7 @@ completions/fish/yt-dlp.fish: yt_dlp/*.py yt_dlp/*/*.py devscripts/fish-completi
mkdir -p completions/fish
$(PYTHON) devscripts/fish-completion.py
_EXTRACTOR_FILES = $(shell find yt_dlp/extractor -name '*.py' -and -not -name 'lazy_extractors.py')
_EXTRACTOR_FILES = $(shell find yt_dlp/extractor -iname '*.py' -and -not -iname 'lazy_extractors.py')
yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscripts/lazy_load_template.py $(_EXTRACTOR_FILES)
$(PYTHON) devscripts/make_lazy_extractors.py $@
@@ -141,14 +123,13 @@ yt-dlp.tar.gz: all
--exclude '*.pyo' \
--exclude '*~' \
--exclude '__pycache__' \
--exclude '.pytest_cache' \
--exclude '.git' \
-- \
README.md supportedsites.md Changelog.md LICENSE \
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
Makefile MANIFEST.in yt-dlp.1 README.txt completions \
setup.py setup.cfg yt-dlp yt_dlp requirements.txt \
devscripts test
devscripts test tox.ini pytest.ini
AUTHORS: .mailmap
git shortlog -s -n | cut -f2 | sort > AUTHORS

1762
README.md

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,11 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import os
from os.path import dirname as dirn
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
import yt_dlp
BASH_COMPLETION_FILE = "completions/bash/yt-dlp"
@@ -24,5 +26,5 @@ def build_completion(opt_parser):
f.write(filled_template)
parser = yt_dlp.parseOpts(ignore_config_files=True)[0]
parser = yt_dlp.parseOpts()[0]
build_completion(parser)

435
devscripts/buildserver.py Normal file
View File

@@ -0,0 +1,435 @@
# UNUSED
#!/usr/bin/python3
import argparse
import ctypes
import functools
import shutil
import subprocess
import sys
import tempfile
import threading
import traceback
import os.path
sys.path.insert(0, os.path.dirname(os.path.dirname((os.path.abspath(__file__)))))
from yt_dlp.compat import (
compat_input,
compat_http_server,
compat_str,
compat_urlparse,
)
# These are not used outside of buildserver.py thus not in compat.py
try:
import winreg as compat_winreg
except ImportError: # Python 2
import _winreg as compat_winreg
try:
import socketserver as compat_socketserver
except ImportError: # Python 2
import SocketServer as compat_socketserver
class BuildHTTPServer(compat_socketserver.ThreadingMixIn, compat_http_server.HTTPServer):
allow_reuse_address = True
advapi32 = ctypes.windll.advapi32
SC_MANAGER_ALL_ACCESS = 0xf003f
SC_MANAGER_CREATE_SERVICE = 0x02
SERVICE_WIN32_OWN_PROCESS = 0x10
SERVICE_AUTO_START = 0x2
SERVICE_ERROR_NORMAL = 0x1
DELETE = 0x00010000
SERVICE_STATUS_START_PENDING = 0x00000002
SERVICE_STATUS_RUNNING = 0x00000004
SERVICE_ACCEPT_STOP = 0x1
SVCNAME = 'youtubedl_builder'
LPTSTR = ctypes.c_wchar_p
START_CALLBACK = ctypes.WINFUNCTYPE(None, ctypes.c_int, ctypes.POINTER(LPTSTR))
class SERVICE_TABLE_ENTRY(ctypes.Structure):
_fields_ = [
('lpServiceName', LPTSTR),
('lpServiceProc', START_CALLBACK)
]
HandlerEx = ctypes.WINFUNCTYPE(
ctypes.c_int, # return
ctypes.c_int, # dwControl
ctypes.c_int, # dwEventType
ctypes.c_void_p, # lpEventData,
ctypes.c_void_p, # lpContext,
)
def _ctypes_array(c_type, py_array):
ar = (c_type * len(py_array))()
ar[:] = py_array
return ar
def win_OpenSCManager():
res = advapi32.OpenSCManagerW(None, None, SC_MANAGER_ALL_ACCESS)
if not res:
raise Exception('Opening service manager failed - '
'are you running this as administrator?')
return res
def win_install_service(service_name, cmdline):
manager = win_OpenSCManager()
try:
h = advapi32.CreateServiceW(
manager, service_name, None,
SC_MANAGER_CREATE_SERVICE, SERVICE_WIN32_OWN_PROCESS,
SERVICE_AUTO_START, SERVICE_ERROR_NORMAL,
cmdline, None, None, None, None, None)
if not h:
raise OSError('Service creation failed: %s' % ctypes.FormatError())
advapi32.CloseServiceHandle(h)
finally:
advapi32.CloseServiceHandle(manager)
def win_uninstall_service(service_name):
manager = win_OpenSCManager()
try:
h = advapi32.OpenServiceW(manager, service_name, DELETE)
if not h:
raise OSError('Could not find service %s: %s' % (
service_name, ctypes.FormatError()))
try:
if not advapi32.DeleteService(h):
raise OSError('Deletion failed: %s' % ctypes.FormatError())
finally:
advapi32.CloseServiceHandle(h)
finally:
advapi32.CloseServiceHandle(manager)
def win_service_report_event(service_name, msg, is_error=True):
with open('C:/sshkeys/log', 'a', encoding='utf-8') as f:
f.write(msg + '\n')
event_log = advapi32.RegisterEventSourceW(None, service_name)
if not event_log:
raise OSError('Could not report event: %s' % ctypes.FormatError())
try:
type_id = 0x0001 if is_error else 0x0004
event_id = 0xc0000000 if is_error else 0x40000000
lines = _ctypes_array(LPTSTR, [msg])
if not advapi32.ReportEventW(
event_log, type_id, 0, event_id, None, len(lines), 0,
lines, None):
raise OSError('Event reporting failed: %s' % ctypes.FormatError())
finally:
advapi32.DeregisterEventSource(event_log)
def win_service_handler(stop_event, *args):
try:
raise ValueError('Handler called with args ' + repr(args))
TODO
except Exception as e:
tb = traceback.format_exc()
msg = str(e) + '\n' + tb
win_service_report_event(service_name, msg, is_error=True)
raise
def win_service_set_status(handle, status_code):
svcStatus = SERVICE_STATUS()
svcStatus.dwServiceType = SERVICE_WIN32_OWN_PROCESS
svcStatus.dwCurrentState = status_code
svcStatus.dwControlsAccepted = SERVICE_ACCEPT_STOP
svcStatus.dwServiceSpecificExitCode = 0
if not advapi32.SetServiceStatus(handle, ctypes.byref(svcStatus)):
raise OSError('SetServiceStatus failed: %r' % ctypes.FormatError())
def win_service_main(service_name, real_main, argc, argv_raw):
try:
# args = [argv_raw[i].value for i in range(argc)]
stop_event = threading.Event()
handler = HandlerEx(functools.partial(stop_event, win_service_handler))
h = advapi32.RegisterServiceCtrlHandlerExW(service_name, handler, None)
if not h:
raise OSError('Handler registration failed: %s' %
ctypes.FormatError())
TODO
except Exception as e:
tb = traceback.format_exc()
msg = str(e) + '\n' + tb
win_service_report_event(service_name, msg, is_error=True)
raise
def win_service_start(service_name, real_main):
try:
cb = START_CALLBACK(
functools.partial(win_service_main, service_name, real_main))
dispatch_table = _ctypes_array(SERVICE_TABLE_ENTRY, [
SERVICE_TABLE_ENTRY(
service_name,
cb
),
SERVICE_TABLE_ENTRY(None, ctypes.cast(None, START_CALLBACK))
])
if not advapi32.StartServiceCtrlDispatcherW(dispatch_table):
raise OSError('ctypes start failed: %s' % ctypes.FormatError())
except Exception as e:
tb = traceback.format_exc()
msg = str(e) + '\n' + tb
win_service_report_event(service_name, msg, is_error=True)
raise
def main(args=None):
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--install',
action='store_const', dest='action', const='install',
help='Launch at Windows startup')
parser.add_argument('-u', '--uninstall',
action='store_const', dest='action', const='uninstall',
help='Remove Windows service')
parser.add_argument('-s', '--service',
action='store_const', dest='action', const='service',
help='Run as a Windows service')
parser.add_argument('-b', '--bind', metavar='<host:port>',
action='store', default='0.0.0.0:8142',
help='Bind to host:port (default %default)')
options = parser.parse_args(args=args)
if options.action == 'install':
fn = os.path.abspath(__file__).replace('v:', '\\\\vboxsrv\\vbox')
cmdline = '%s %s -s -b %s' % (sys.executable, fn, options.bind)
win_install_service(SVCNAME, cmdline)
return
if options.action == 'uninstall':
win_uninstall_service(SVCNAME)
return
if options.action == 'service':
win_service_start(SVCNAME, main)
return
host, port_str = options.bind.split(':')
port = int(port_str)
print('Listening on %s:%d' % (host, port))
srv = BuildHTTPServer((host, port), BuildHTTPRequestHandler)
thr = threading.Thread(target=srv.serve_forever)
thr.start()
compat_input('Press ENTER to shut down')
srv.shutdown()
thr.join()
def rmtree(path):
for name in os.listdir(path):
fname = os.path.join(path, name)
if os.path.isdir(fname):
rmtree(fname)
else:
os.chmod(fname, 0o666)
os.remove(fname)
os.rmdir(path)
class BuildError(Exception):
def __init__(self, output, code=500):
self.output = output
self.code = code
def __str__(self):
return self.output
class HTTPError(BuildError):
pass
class PythonBuilder(object):
def __init__(self, **kwargs):
python_version = kwargs.pop('python', '3.4')
python_path = None
for node in ('Wow6432Node\\', ''):
try:
key = compat_winreg.OpenKey(
compat_winreg.HKEY_LOCAL_MACHINE,
r'SOFTWARE\%sPython\PythonCore\%s\InstallPath' % (node, python_version))
try:
python_path, _ = compat_winreg.QueryValueEx(key, '')
finally:
compat_winreg.CloseKey(key)
break
except Exception:
pass
if not python_path:
raise BuildError('No such Python version: %s' % python_version)
self.pythonPath = python_path
super(PythonBuilder, self).__init__(**kwargs)
class GITInfoBuilder(object):
def __init__(self, **kwargs):
try:
self.user, self.repoName = kwargs['path'][:2]
self.rev = kwargs.pop('rev')
except ValueError:
raise BuildError('Invalid path')
except KeyError as e:
raise BuildError('Missing mandatory parameter "%s"' % e.args[0])
path = os.path.join(os.environ['APPDATA'], 'Build archive', self.repoName, self.user)
if not os.path.exists(path):
os.makedirs(path)
self.basePath = tempfile.mkdtemp(dir=path)
self.buildPath = os.path.join(self.basePath, 'build')
super(GITInfoBuilder, self).__init__(**kwargs)
class GITBuilder(GITInfoBuilder):
def build(self):
try:
subprocess.check_output(['git', 'clone', 'git://github.com/%s/%s.git' % (self.user, self.repoName), self.buildPath])
subprocess.check_output(['git', 'checkout', self.rev], cwd=self.buildPath)
except subprocess.CalledProcessError as e:
raise BuildError(e.output)
super(GITBuilder, self).build()
class YoutubeDLBuilder(object):
authorizedUsers = ['fraca7', 'phihag', 'rg3', 'FiloSottile', 'ytdl-org']
def __init__(self, **kwargs):
if self.repoName != 'yt-dlp':
raise BuildError('Invalid repository "%s"' % self.repoName)
if self.user not in self.authorizedUsers:
raise HTTPError('Unauthorized user "%s"' % self.user, 401)
super(YoutubeDLBuilder, self).__init__(**kwargs)
def build(self):
try:
proc = subprocess.Popen([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'], stdin=subprocess.PIPE, cwd=self.buildPath)
proc.wait()
#subprocess.check_output([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'],
# cwd=self.buildPath)
except subprocess.CalledProcessError as e:
raise BuildError(e.output)
super(YoutubeDLBuilder, self).build()
class DownloadBuilder(object):
def __init__(self, **kwargs):
self.handler = kwargs.pop('handler')
self.srcPath = os.path.join(self.buildPath, *tuple(kwargs['path'][2:]))
self.srcPath = os.path.abspath(os.path.normpath(self.srcPath))
if not self.srcPath.startswith(self.buildPath):
raise HTTPError(self.srcPath, 401)
super(DownloadBuilder, self).__init__(**kwargs)
def build(self):
if not os.path.exists(self.srcPath):
raise HTTPError('No such file', 404)
if os.path.isdir(self.srcPath):
raise HTTPError('Is a directory: %s' % self.srcPath, 401)
self.handler.send_response(200)
self.handler.send_header('Content-Type', 'application/octet-stream')
self.handler.send_header('Content-Disposition', 'attachment; filename=%s' % os.path.split(self.srcPath)[-1])
self.handler.send_header('Content-Length', str(os.stat(self.srcPath).st_size))
self.handler.end_headers()
with open(self.srcPath, 'rb') as src:
shutil.copyfileobj(src, self.handler.wfile)
super(DownloadBuilder, self).build()
class CleanupTempDir(object):
def build(self):
try:
rmtree(self.basePath)
except Exception as e:
print('WARNING deleting "%s": %s' % (self.basePath, e))
super(CleanupTempDir, self).build()
class Null(object):
def __init__(self, **kwargs):
pass
def start(self):
pass
def close(self):
pass
def build(self):
pass
class Builder(PythonBuilder, GITBuilder, YoutubeDLBuilder, DownloadBuilder, CleanupTempDir, Null):
pass
class BuildHTTPRequestHandler(compat_http_server.BaseHTTPRequestHandler):
actionDict = {'build': Builder, 'download': Builder} # They're the same, no more caching.
def do_GET(self):
path = compat_urlparse.urlparse(self.path)
paramDict = dict([(key, value[0]) for key, value in compat_urlparse.parse_qs(path.query).items()])
action, _, path = path.path.strip('/').partition('/')
if path:
path = path.split('/')
if action in self.actionDict:
try:
builder = self.actionDict[action](path=path, handler=self, **paramDict)
builder.start()
try:
builder.build()
finally:
builder.close()
except BuildError as e:
self.send_response(e.code)
msg = compat_str(e).encode('UTF-8')
self.send_header('Content-Type', 'text/plain; charset=UTF-8')
self.send_header('Content-Length', len(msg))
self.end_headers()
self.wfile.write(msg)
else:
self.send_response(500, 'Unknown build method "%s"' % action)
else:
self.send_response(500, 'Malformed URL')
if __name__ == '__main__':
main()

View File

@@ -1,4 +1,6 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
"""
This script employs a VERY basic heuristic ('porn' in webpage.lower()) to check
if we are not 'age_limit' tagging some porn site
@@ -10,12 +12,11 @@ pass the list filename as the only argument
# Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import gettestcases
from yt_dlp.utils import compat_urllib_parse_urlparse, compat_urllib_request
from yt_dlp.utils import compat_urllib_parse_urlparse
from yt_dlp.utils import compat_urllib_request
if len(sys.argv) > 1:
METHOD = 'LIST'
@@ -28,7 +29,7 @@ for test in gettestcases():
try:
webpage = compat_urllib_request.urlopen(test['url'], timeout=10).read()
except Exception:
print('\nFail: {}'.format(test['name']))
print('\nFail: {0}'.format(test['name']))
continue
webpage = webpage.decode('utf8', 'replace')
@@ -38,7 +39,7 @@ for test in gettestcases():
elif METHOD == 'LIST':
domain = compat_urllib_parse_urlparse(test['url']).netloc
if not domain:
print('\nFail: {}'.format(test['name']))
print('\nFail: {0}'.format(test['name']))
continue
domain = '.'.join(domain.split('.')[-2:])
@@ -46,11 +47,11 @@ for test in gettestcases():
if RESULT and ('info_dict' not in test or 'age_limit' not in test['info_dict']
or test['info_dict']['age_limit'] != 18):
print('\nPotential missing age_limit check: {}'.format(test['name']))
print('\nPotential missing age_limit check: {0}'.format(test['name']))
elif not RESULT and ('info_dict' in test and 'age_limit' in test['info_dict']
and test['info_dict']['age_limit'] == 18):
print('\nPotential false negative: {}'.format(test['name']))
print('\nPotential false negative: {0}'.format(test['name']))
else:
sys.stdout.write('.')

View File

@@ -0,0 +1,112 @@
# Unused
#!/usr/bin/env python3
from __future__ import unicode_literals
import io
import json
import mimetypes
import netrc
import optparse
import os
import re
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yt_dlp.compat import (
compat_basestring,
compat_getpass,
compat_print,
compat_urllib_request,
)
from yt_dlp.utils import (
make_HTTPS_handler,
sanitized_Request,
)
class GitHubReleaser(object):
_API_URL = 'https://api.github.com/repos/ytdl-org/youtube-dl/releases'
_UPLOADS_URL = 'https://uploads.github.com/repos/ytdl-org/youtube-dl/releases/%s/assets?name=%s'
_NETRC_MACHINE = 'github.com'
def __init__(self, debuglevel=0):
self._init_github_account()
https_handler = make_HTTPS_handler({}, debuglevel=debuglevel)
self._opener = compat_urllib_request.build_opener(https_handler)
def _init_github_account(self):
try:
info = netrc.netrc().authenticators(self._NETRC_MACHINE)
if info is not None:
self._token = info[2]
compat_print('Using GitHub credentials found in .netrc...')
return
else:
compat_print('No GitHub credentials found in .netrc')
except (IOError, netrc.NetrcParseError):
compat_print('Unable to parse .netrc')
self._token = compat_getpass(
'Type your GitHub PAT (personal access token) and press [Return]: ')
def _call(self, req):
if isinstance(req, compat_basestring):
req = sanitized_Request(req)
req.add_header('Authorization', 'token %s' % self._token)
response = self._opener.open(req).read().decode('utf-8')
return json.loads(response)
def list_releases(self):
return self._call(self._API_URL)
def create_release(self, tag_name, name=None, body='', draft=False, prerelease=False):
data = {
'tag_name': tag_name,
'target_commitish': 'master',
'name': name,
'body': body,
'draft': draft,
'prerelease': prerelease,
}
req = sanitized_Request(self._API_URL, json.dumps(data).encode('utf-8'))
return self._call(req)
def create_asset(self, release_id, asset):
asset_name = os.path.basename(asset)
url = self._UPLOADS_URL % (release_id, asset_name)
# Our files are small enough to be loaded directly into memory.
data = open(asset, 'rb').read()
req = sanitized_Request(url, data)
mime_type, _ = mimetypes.guess_type(asset_name)
req.add_header('Content-Type', mime_type or 'application/octet-stream')
return self._call(req)
def main():
parser = optparse.OptionParser(usage='%prog CHANGELOG VERSION BUILDPATH')
options, args = parser.parse_args()
if len(args) != 3:
        parser.error('Expected a changelog file, a version and a build directory')
changelog_file, version, build_path = args
with io.open(changelog_file, encoding='utf-8') as inf:
changelog = inf.read()
mobj = re.search(r'(?s)version %s\n{2}(.+?)\n{3}' % version, changelog)
body = mobj.group(1) if mobj else ''
releaser = GitHubReleaser()
new_release = releaser.create_release(
version, name='yt-dlp %s' % version, body=body)
release_id = new_release['id']
for asset in os.listdir(build_path):
compat_print('Uploading %s...' % asset)
releaser.create_asset(release_id, os.path.join(build_path, asset))
if __name__ == '__main__':
main()
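
For context, the .netrc lookup in _init_github_account() expects standard netrc-format credentials; a minimal sketch of what it reads (the username and token below are placeholders, not part of this diff):
import netrc

# ~/.netrc would contain a line such as:
#     machine github.com login <username> password <personal-access-token>
info = netrc.netrc().authenticators('github.com')
if info is not None:
    login, account, token = info  # the script above takes info[2] as the token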

View File

@@ -1,10 +1,12 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import optparse
import os
from os.path import dirname as dirn
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
import yt_dlp
from yt_dlp.utils import shell_quote
@@ -44,5 +46,5 @@ def build_completion(opt_parser):
f.write(filled_template)
parser = yt_dlp.parseOpts(ignore_config_files=True)[0]
parser = yt_dlp.parseOpts()[0]
build_completion(parser)

View File

@@ -1,13 +1,15 @@
#!/usr/bin/env python3
import codecs
import os
import subprocess
import sys
from __future__ import unicode_literals
import codecs
import subprocess
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yt_dlp.aes import aes_encrypt, key_expansion
from yt_dlp.utils import intlist_to_bytes
from yt_dlp.aes import aes_encrypt, key_expansion
secret_msg = b'Secret message goes here'

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import json
import sys
import hashlib
import os.path
if len(sys.argv) <= 1:
print('Specify the version number as parameter')
sys.exit()
version = sys.argv[1]
with open('update/LATEST_VERSION', 'w') as f:
f.write(version)
versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
del versions_info['signature']
new_version = {}
filenames = {
'bin': 'yt-dlp',
'exe': 'yt-dlp.exe',
'tar': 'yt-dlp-%s.tar.gz' % version}
build_dir = os.path.join('..', '..', 'build', version)
for key, filename in filenames.items():
url = 'https://yt-dl.org/downloads/%s/%s' % (version, filename)
fn = os.path.join(build_dir, filename)
with open(fn, 'rb') as f:
data = f.read()
if not data:
raise ValueError('File %s is empty!' % fn)
sha256sum = hashlib.sha256(data).hexdigest()
new_version[key] = (url, sha256sum)
versions_info['versions'][version] = new_version
versions_info['latest'] = version
with open('update/versions.json', 'w') as jsonf:
json.dump(versions_info, jsonf, indent=4, sort_keys=True)
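
After this script runs, update/versions.json holds one (url, sha256) pair per build type plus a 'latest' pointer; the on-disk JSON has roughly the following shape, with placeholder version and checksums (generate-download.py below reads version_dict['bin'][0] and [1] from it):
# Illustrative shape of update/versions.json; the version, URLs and checksums are placeholders.
versions_info = {
    'latest': '2021.01.01',
    'versions': {
        '2021.01.01': {
            'bin': ['https://yt-dl.org/downloads/2021.01.01/yt-dlp', '<sha256>'],
            'exe': ['https://yt-dl.org/downloads/2021.01.01/yt-dlp.exe', '<sha256>'],
            'tar': ['https://yt-dl.org/downloads/2021.01.01/yt-dlp-2021.01.01.tar.gz', '<sha256>'],
        },
    },
}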

View File

@@ -0,0 +1,22 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import json
versions_info = json.load(open('update/versions.json'))
version = versions_info['latest']
version_dict = versions_info['versions'][version]
# Read template page
with open('download.html.in', 'r', encoding='utf-8') as tmplf:
template = tmplf.read()
template = template.replace('@PROGRAM_VERSION@', version)
template = template.replace('@PROGRAM_URL@', version_dict['bin'][0])
template = template.replace('@PROGRAM_SHA256SUM@', version_dict['bin'][1])
template = template.replace('@EXE_URL@', version_dict['exe'][0])
template = template.replace('@EXE_SHA256SUM@', version_dict['exe'][1])
template = template.replace('@TAR_URL@', version_dict['tar'][0])
template = template.replace('@TAR_SHA256SUM@', version_dict['tar'][1])
with open('download.html', 'w', encoding='utf-8') as dlf:
dlf.write(template)

View File

@@ -0,0 +1,34 @@
#!/usr/bin/env python3
from __future__ import unicode_literals, with_statement
import rsa
import json
from binascii import hexlify
try:
input = raw_input
except NameError:
pass
versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
del versions_info['signature']
print('Enter the PKCS1 private key, followed by a blank line:')
privkey = b''
while True:
try:
line = input()
except EOFError:
break
if line == '':
break
privkey += line.encode('ascii') + b'\n'
privkey = rsa.PrivateKey.load_pkcs1(privkey)
signature = hexlify(rsa.pkcs1.sign(json.dumps(versions_info, sort_keys=True).encode('utf-8'), privkey, 'SHA-256')).decode()
print('signature: ' + signature)
versions_info['signature'] = signature
with open('update/versions.json', 'w') as versionsf:
json.dump(versions_info, versionsf, indent=4, sort_keys=True)
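
The verification counterpart is not part of this diff, but a sketch with the same rsa package shows what a consumer would do, assuming the matching PKCS#1 public key is available as pubkey.pem (an assumed filename):
import json
from binascii import unhexlify

import rsa

# Sketch only: verify the hex signature written by the script above.
versions_info = json.load(open('update/versions.json'))
signature = unhexlify(versions_info.pop('signature'))
message = json.dumps(versions_info, sort_keys=True).encode('utf-8')
with open('pubkey.pem', 'rb') as f:
    pubkey = rsa.PublicKey.load_pkcs1(f.read())
rsa.verify(message, signature, pubkey)  # raises rsa.VerificationError on mismatch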

View File

@@ -0,0 +1,21 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import with_statement, unicode_literals
import datetime
import glob
import io # For Python 2 compatibility
import os
import re
year = str(datetime.datetime.now().year)
for fn in glob.glob('*.html*'):
with io.open(fn, encoding='utf-8') as f:
content = f.read()
newc = re.sub(r'(?P<copyright>Copyright © 2011-)(?P<year>[0-9]{4})', 'Copyright © 2011-' + year, content)
if content != newc:
tmpFn = fn + '.part'
with io.open(tmpFn, 'wt', encoding='utf-8') as outf:
outf.write(newc)
os.rename(tmpFn, fn)

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import datetime
import io
import json
import textwrap
atom_template = textwrap.dedent("""\
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<link rel="self" href="http://ytdl-org.github.io/youtube-dl/update/releases.atom" />
<title>yt-dlp releases</title>
<id>https://yt-dl.org/feed/yt-dlp-updates-feed</id>
<updated>@TIMESTAMP@</updated>
@ENTRIES@
</feed>""")
entry_template = textwrap.dedent("""
<entry>
<id>https://yt-dl.org/feed/yt-dlp-updates-feed/yt-dlp-@VERSION@</id>
<title>New version @VERSION@</title>
<link href="http://ytdl-org.github.io/yt-dlp" />
<content type="xhtml">
<div xmlns="http://www.w3.org/1999/xhtml">
Downloads available at <a href="https://yt-dl.org/downloads/@VERSION@/">https://yt-dl.org/downloads/@VERSION@/</a>
</div>
</content>
<author>
<name>The yt-dlp maintainers</name>
</author>
<updated>@TIMESTAMP@</updated>
</entry>
""")
now = datetime.datetime.now()
now_iso = now.isoformat() + 'Z'
atom_template = atom_template.replace('@TIMESTAMP@', now_iso)
versions_info = json.load(open('update/versions.json'))
versions = list(versions_info['versions'].keys())
versions.sort()
entries = []
for v in versions:
fields = v.split('.')
year, month, day = map(int, fields[:3])
faked = 0
patchlevel = 0
while True:
try:
datetime.date(year, month, day)
except ValueError:
day -= 1
faked += 1
assert day > 0
continue
break
if len(fields) >= 4:
try:
patchlevel = int(fields[3])
except ValueError:
patchlevel = 1
timestamp = '%04d-%02d-%02dT00:%02d:%02dZ' % (year, month, day, faked, patchlevel)
entry = entry_template.replace('@TIMESTAMP@', timestamp)
entry = entry.replace('@VERSION@', v)
entries.append(entry)
entries_str = textwrap.indent(''.join(entries), '\t')
atom_template = atom_template.replace('@ENTRIES@', entries_str)
with io.open('update/releases.atom', 'w', encoding='utf-8') as atom_file:
atom_file.write(atom_template)
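
As a worked example of the date fix-up above: a version whose date component is not a valid calendar day is walked back day by day, the number of adjustments goes into the minutes field and the patchlevel into the seconds field of the faked timestamp.
# Illustrative inputs and the timestamps the loop above produces:
#   '2021.02.29'   -> Feb 29 2021 is invalid -> day=28, faked=1, patchlevel=0 -> '2021-02-28T00:01:00Z'
#   '2021.02.28.2' -> valid date, faked=0, patchlevel=2                       -> '2021-02-28T00:00:02Z'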

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import sys
import os
import textwrap
# We must be able to import yt_dlp
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import yt_dlp
def main():
with open('supportedsites.html.in', 'r', encoding='utf-8') as tmplf:
template = tmplf.read()
ie_htmls = []
for ie in yt_dlp.list_extractors(age_limit=None):
ie_html = '<b>{}</b>'.format(ie.IE_NAME)
ie_desc = getattr(ie, 'IE_DESC', None)
if ie_desc is False:
continue
elif ie_desc is not None:
ie_html += ': {}'.format(ie.IE_DESC)
if not ie.working():
ie_html += ' (Currently broken)'
ie_htmls.append('<li>{}</li>'.format(ie_html))
template = template.replace('@SITES@', textwrap.indent('\n'.join(ie_htmls), '\t'))
with open('supportedsites.html', 'w', encoding='utf-8') as sitesf:
sitesf.write(template)
if __name__ == '__main__':
main()

View File

@@ -1,33 +1,31 @@
import importlib
import random
# coding: utf-8
import re
from ..utils import (
age_restricted,
bug_reports_message,
classproperty,
write_string,
)
from ..utils import bug_reports_message, write_string
class LazyLoadMetaClass(type):
def __getattr__(cls, name):
# "_TESTS" bloat the lazy_extractors
if '_real_class' not in cls.__dict__ and name != 'get_testcases':
if '_real_class' not in cls.__dict__:
write_string(
'WARNING: Falling back to normal extractor since lazy extractor '
f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
return getattr(cls.real_class, name)
f'WARNING: Falling back to normal extractor since lazy extractor '
f'{cls.__name__} does not have attribute {name}{bug_reports_message()}')
return getattr(cls._get_real_class(), name)
class LazyLoadExtractor(metaclass=LazyLoadMetaClass):
@classproperty
def real_class(cls):
_module = None
_WORKING = True
@classmethod
def _get_real_class(cls):
if '_real_class' not in cls.__dict__:
cls._real_class = getattr(importlib.import_module(cls._module), cls.__name__)
mod = __import__(cls._module, fromlist=(cls.__name__,))
cls._real_class = getattr(mod, cls.__name__)
return cls._real_class
def __new__(cls, *args, **kwargs):
instance = cls.real_class.__new__(cls.real_class)
real_cls = cls._get_real_class()
instance = real_cls.__new__(real_cls)
instance.__init__(*args, **kwargs)
return instance
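
For orientation, the stubs that make_lazy_extractors.py (further down in this diff) generates from this template look roughly like the following; the class name, module and URL pattern here are purely illustrative:
# Hypothetical example of a generated entry in yt_dlp/extractor/lazy_extractors.py:
class SomeSiteIE(LazyLoadExtractor):
    _module = 'yt_dlp.extractor.somesite'
    _VALID_URL = r'https?://(?:www\.)?somesite\.example/watch/(?P<id>\d+)'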

View File

@@ -1,4 +1,7 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import io
import optparse
import re
@@ -13,7 +16,7 @@ def main():
infile, outfile = args
with open(infile, encoding='utf-8') as inf:
with io.open(infile, encoding='utf-8') as inf:
readme = inf.read()
bug_text = re.search(
@@ -23,7 +26,7 @@ def main():
out = bug_text + dev_text
with open(outfile, 'w', encoding='utf-8') as outf:
with io.open(outfile, 'w', encoding='utf-8') as outf:
outf.write(out)

View File

@@ -1,18 +1,10 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import io
import optparse
def read(fname):
with open(fname, encoding='utf-8') as f:
return f.read()
# Get the version from yt_dlp/version.py without importing the package
def read_version(fname):
exec(compile(read(fname), fname, 'exec'))
return locals()['__version__']
def main():
parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
options, args = parser.parse_args()
@@ -20,10 +12,18 @@ def main():
parser.error('Expected an input and an output filename')
infile, outfile = args
with open(outfile, 'w', encoding='utf-8') as outf:
outf.write(
read(infile) % {'version': read_version('yt_dlp/version.py')})
with io.open(infile, encoding='utf-8') as inf:
issue_template_tmpl = inf.read()
# Get the version from yt_dlp/version.py without importing the package
exec(compile(open('yt_dlp/version.py').read(),
'yt_dlp/version.py', 'exec'))
out = issue_template_tmpl % {'version': locals()['__version__']}
with io.open(outfile, 'w', encoding='utf-8') as outf:
outf.write(out)
if __name__ == '__main__':
main()

View File

@@ -1,125 +1,105 @@
#!/usr/bin/env python3
import optparse
import os
import sys
from __future__ import unicode_literals, print_function
from inspect import getsource
import io
import os
from os.path import dirname as dirn
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
lazy_extractors_filename = sys.argv[1] if len(sys.argv) > 1 else 'yt_dlp/extractor/lazy_extractors.py'
if os.path.exists(lazy_extractors_filename):
os.remove(lazy_extractors_filename)
NO_ATTR = object()
STATIC_CLASS_PROPERTIES = ['IE_NAME', 'IE_DESC', 'SEARCH_KEY', '_WORKING', '_NETRC_MACHINE', 'age_limit']
CLASS_METHODS = [
'ie_key', 'working', 'description', 'suitable', '_match_valid_url', '_match_id', 'get_temp_id', 'is_suitable'
]
IE_TEMPLATE = '''
# Block plugins from loading
plugins_dirname = 'ytdlp_plugins'
plugins_blocked_dirname = 'ytdlp_plugins_blocked'
if os.path.exists(plugins_dirname):
os.rename(plugins_dirname, plugins_blocked_dirname)
from yt_dlp.extractor import _ALL_CLASSES
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
if os.path.exists(plugins_blocked_dirname):
os.rename(plugins_blocked_dirname, plugins_dirname)
with open('devscripts/lazy_load_template.py', 'rt') as f:
module_template = f.read()
CLASS_PROPERTIES = ['ie_key', 'working', '_match_valid_url', 'suitable', '_match_id', 'get_temp_id']
module_contents = [
module_template,
*[getsource(getattr(InfoExtractor, k)) for k in CLASS_PROPERTIES],
'\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n pass\n']
ie_template = '''
class {name}({bases}):
_module = {module!r}
_module = '{module}'
'''
with open('devscripts/lazy_load_template.py', encoding='utf-8') as f:
MODULE_TEMPLATE = f.read()
def main():
parser = optparse.OptionParser(usage='%prog [OUTFILE.py]')
args = parser.parse_args()[1] or ['yt_dlp/extractor/lazy_extractors.py']
if len(args) != 1:
parser.error('Expected only an output filename')
lazy_extractors_filename = args[0]
if os.path.exists(lazy_extractors_filename):
os.remove(lazy_extractors_filename)
_ALL_CLASSES = get_all_ies() # Must be before import
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
module_src = '\n'.join((
MODULE_TEMPLATE,
' _module = None',
*extra_ie_code(DummyInfoExtractor),
'\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n pass\n',
*build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
))
with open(lazy_extractors_filename, 'wt', encoding='utf-8') as f:
f.write(f'{module_src}\n')
def get_base_name(base):
if base is InfoExtractor:
return 'LazyLoadExtractor'
elif base is SearchInfoExtractor:
return 'LazyLoadSearchExtractor'
else:
return base.__name__
def get_all_ies():
PLUGINS_DIRNAME = 'ytdlp_plugins'
BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
if os.path.exists(PLUGINS_DIRNAME):
os.rename(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
try:
from yt_dlp.extractor.extractors import _ALL_CLASSES
finally:
if os.path.exists(BLOCKED_DIRNAME):
os.rename(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
return _ALL_CLASSES
def extra_ie_code(ie, base=None):
for var in STATIC_CLASS_PROPERTIES:
val = getattr(ie, var)
if val != (getattr(base, var) if base else NO_ATTR):
yield f' {var} = {val!r}'
yield ''
for name in CLASS_METHODS:
f = getattr(ie, name)
if not base or f.__func__ != getattr(base, name).__func__:
yield getsource(f)
def build_ies(ies, bases, attr_base):
names = []
for ie in sort_ies(ies, bases):
yield build_lazy_ie(ie, ie.__name__, attr_base)
if ie in ies:
names.append(ie.__name__)
yield f'\n_ALL_CLASSES = [{", ".join(names)}]'
def sort_ies(ies, ignored_bases):
"""find the correct sorting and add the required base classes so that subclasses can be correctly created"""
classes, returned_classes = ies[:-1], set()
assert ies[-1].__name__ == 'GenericIE', 'Last IE must be GenericIE'
while classes:
for c in classes[:]:
bases = set(c.__bases__) - {object, *ignored_bases}
restart = False
for b in bases:
if b not in classes and b not in returned_classes:
assert b.__name__ != 'GenericIE', 'Cannot inherit from GenericIE'
classes.insert(0, b)
restart = True
if restart:
break
if bases <= returned_classes:
yield c
returned_classes.add(c)
classes.remove(c)
break
yield ies[-1]
def build_lazy_ie(ie, name, attr_base):
bases = ', '.join({
'InfoExtractor': 'LazyLoadExtractor',
'SearchInfoExtractor': 'LazyLoadSearchExtractor',
}.get(base.__name__, base.__name__) for base in ie.__bases__)
s = IE_TEMPLATE.format(name=name, module=ie.__module__, bases=bases)
def build_lazy_ie(ie, name):
s = ie_template.format(
name=name,
bases=', '.join(map(get_base_name, ie.__bases__)),
module=ie.__module__)
valid_url = getattr(ie, '_VALID_URL', None)
if not valid_url and hasattr(ie, '_make_valid_url'):
valid_url = ie._make_valid_url()
if valid_url:
s += f' _VALID_URL = {valid_url!r}\n'
return s + '\n'.join(extra_ie_code(ie, attr_base))
if not ie._WORKING:
s += ' _WORKING = False\n'
if ie.suitable.__func__ is not InfoExtractor.suitable.__func__:
s += f'\n{getsource(ie.suitable)}'
return s
if __name__ == '__main__':
main()
# find the correct sorting and add the required base classes so that subclasses
# can be correctly created
classes = _ALL_CLASSES[:-1]
ordered_cls = []
while classes:
for c in classes[:]:
bases = set(c.__bases__) - set((object, InfoExtractor, SearchInfoExtractor))
stop = False
for b in bases:
if b not in classes and b not in ordered_cls:
if b.__name__ == 'GenericIE':
exit()
classes.insert(0, b)
stop = True
if stop:
break
if all(b in ordered_cls for b in bases):
ordered_cls.append(c)
classes.remove(c)
break
ordered_cls.append(_ALL_CLASSES[-1])
names = []
for ie in ordered_cls:
name = ie.__name__
src = build_lazy_ie(ie, name)
module_contents.append(src)
if ie in _ALL_CLASSES:
names.append(name)
module_contents.append(
'\n_ALL_CLASSES = [{0}]'.format(', '.join(names)))
module_src = '\n'.join(module_contents) + '\n'
with io.open(lazy_extractors_filename, 'wt', encoding='utf-8') as f:
f.write(module_src)

View File

@@ -2,69 +2,30 @@
# yt-dlp --help | make_readme.py
# This must be run in a console of correct width
import functools
import re
from __future__ import unicode_literals
import io
import sys
import re
README_FILE = 'README.md'
helptext = sys.stdin.read()
OPTIONS_START = 'General Options:'
OPTIONS_END = 'CONFIGURATION'
EPILOG_START = 'See full documentation'
ALLOWED_OVERSHOOT = 2
if isinstance(helptext, bytes):
helptext = helptext.decode('utf-8')
DISABLE_PATCH = object()
with io.open(README_FILE, encoding='utf-8') as f:
oldreadme = f.read()
header = oldreadme[:oldreadme.index('## General Options:')]
footer = oldreadme[oldreadme.index('# CONFIGURATION'):]
def take_section(text, start=None, end=None, *, shift=0):
return text[
text.index(start) + shift if start else None:
text.index(end) + shift if end else None
]
options = helptext[helptext.index(' General Options:'):]
options = re.sub(r'(?m)^ (\w.+)$', r'## \1', options)
options = options + '\n'
def apply_patch(text, patch):
return text if patch[0] is DISABLE_PATCH else re.sub(*patch, text)
options = take_section(sys.stdin.read(), f'\n {OPTIONS_START}', f'\n{EPILOG_START}', shift=1)
max_width = max(map(len, options.split('\n')))
switch_col_width = len(re.search(r'(?m)^\s{5,}', options).group())
delim = f'\n{" " * switch_col_width}'
PATCHES = (
( # Headings
r'(?m)^ (\w.+\n)( (?=\w))?',
r'## \1'
),
( # Do not split URLs
rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n'))
),
( # Do not split "words"
rf'(?m)({delim}\S+)+$',
lambda mobj: ''.join((delim, mobj.group(0).replace(delim, '')))
),
( # Allow overshooting last line
rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
lambda mobj: (mobj.group().replace(delim, ' ')
if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
else mobj.group())
),
( # Avoid newline when a space is available b/w switch and description
DISABLE_PATCH, # This creates issues with prepare_manpage
r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
r'\1 '
),
)
with open(README_FILE, encoding='utf-8') as f:
readme = f.read()
with open(README_FILE, 'w', encoding='utf-8') as f:
f.write(''.join((
take_section(readme, end=f'## {OPTIONS_START}'),
functools.reduce(apply_patch, PATCHES, options),
take_section(readme, f'# {OPTIONS_END}'),
)))
with io.open(README_FILE, 'w', encoding='utf-8') as f:
f.write(header)
f.write(options)
f.write(footer)

View File

@@ -1,23 +1,48 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import io
import optparse
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yt_dlp.extractor import list_extractor_classes
# Import yt_dlp
ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.insert(0, ROOT_DIR)
import yt_dlp
def main():
parser = optparse.OptionParser(usage='%prog OUTFILE.md')
_, args = parser.parse_args()
options, args = parser.parse_args()
if len(args) != 1:
parser.error('Expected an output filename')
out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
outfile, = args
with open(args[0], 'w', encoding='utf-8') as outf:
outf.write(f'# Supported sites\n{out}\n')
def gen_ies_md(ies):
for ie in ies:
ie_md = '**{0}**'.format(ie.IE_NAME)
ie_desc = getattr(ie, 'IE_DESC', None)
if ie_desc is False:
continue
if ie_desc is not None:
ie_md += ': {0}'.format(ie.IE_DESC)
search_key = getattr(ie, 'SEARCH_KEY', None)
if search_key is not None:
ie_md += f'; "{ie.SEARCH_KEY}:" prefix'
if not ie.working():
ie_md += ' (Currently broken)'
yield ie_md
ies = sorted(yt_dlp.gen_extractors(), key=lambda i: i.IE_NAME.lower())
out = '# Supported sites\n' + ''.join(
' - ' + md + '\n'
for md in gen_ies_md(ies))
with io.open(outfile, 'w', encoding='utf-8') as outf:
outf.write(out)
if __name__ == '__main__':

6
devscripts/posix-locale.sh Executable file
View File

@@ -0,0 +1,6 @@
# source this file in your shell to get a POSIX locale (which will break many programs, but that's kind of the point)
export LC_ALL=POSIX
export LANG=POSIX
export LANGUAGE=POSIX

View File

@@ -1,4 +1,7 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import io
import optparse
import os.path
import re
@@ -29,14 +32,14 @@ def main():
outfile, = args
with open(README_FILE, encoding='utf-8') as f:
with io.open(README_FILE, encoding='utf-8') as f:
readme = f.read()
readme = filter_excluded_sections(readme)
readme = move_sections(readme)
readme = filter_options(readme)
with open(outfile, 'w', encoding='utf-8') as outf:
with io.open(outfile, 'w', encoding='utf-8') as outf:
outf.write(PREFIX + readme)

143
devscripts/release.sh Executable file
View File

@@ -0,0 +1,143 @@
# Unused
#!/bin/bash
# IMPORTANT: the following assumptions are made
# * the GH repo is on the origin remote
# * the gh-pages branch is named so locally
# * the git config user.signingkey is properly set
# You will need
# pip install coverage nose rsa wheel
# TODO
# release notes
# make hash on local files
set -e
skip_tests=true
gpg_sign_commits=""
buildserver='localhost:8142'
while true
do
case "$1" in
--run-tests)
skip_tests=false
shift
;;
--gpg-sign-commits|-S)
gpg_sign_commits="-S"
shift
;;
--buildserver)
buildserver="$2"
shift 2
;;
--*)
echo "ERROR: unknown option $1"
exit 1
;;
*)
break
;;
esac
done
if [ -z "$1" ]; then echo "ERROR: specify version number like this: $0 1994.09.06"; exit 1; fi
version="$1"
major_version=$(echo "$version" | sed -n 's#^\([0-9]*\.[0-9]*\.[0-9]*\).*#\1#p')
if test "$major_version" '!=' "$(date '+%Y.%m.%d')"; then
echo "$version does not start with today's date!"
exit 1
fi
if [ ! -z "`git tag | grep "$version"`" ]; then echo 'ERROR: version already present'; exit 1; fi
if [ ! -z "`git status --porcelain | grep -v CHANGELOG`" ]; then echo 'ERROR: the working directory is not clean; commit or stash changes'; exit 1; fi
useless_files=$(find yt_dlp -type f -not -name '*.py')
if [ ! -z "$useless_files" ]; then echo "ERROR: Non-.py files in yt_dlp: $useless_files"; exit 1; fi
if [ ! -f "updates_key.pem" ]; then echo 'ERROR: updates_key.pem missing'; exit 1; fi
if ! type pandoc >/dev/null 2>/dev/null; then echo 'ERROR: pandoc is missing'; exit 1; fi
if ! python3 -c 'import rsa' 2>/dev/null; then echo 'ERROR: python3-rsa is missing'; exit 1; fi
if ! python3 -c 'import wheel' 2>/dev/null; then echo 'ERROR: wheel is missing'; exit 1; fi
read -p "Is Changelog up to date? (y/n) " -n 1
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi
/bin/echo -e "\n### First of all, testing..."
make clean
if $skip_tests ; then
echo 'SKIPPING TESTS'
else
nosetests --verbose --with-coverage --cover-package=yt_dlp --cover-html test --stop || exit 1
fi
/bin/echo -e "\n### Changing version in version.py..."
sed -i "s/__version__ = '.*'/__version__ = '$version'/" yt_dlp/version.py
/bin/echo -e "\n### Changing version in Changelog..."
sed -i "s/<unreleased>/$version/" Changelog.md
/bin/echo -e "\n### Committing documentation, templates and yt_dlp/version.py..."
make README.md CONTRIBUTING.md issuetemplates supportedsites
git add README.md CONTRIBUTING.md .github/ISSUE_TEMPLATE/1_broken_site.md .github/ISSUE_TEMPLATE/2_site_support_request.md .github/ISSUE_TEMPLATE/3_site_feature_request.md .github/ISSUE_TEMPLATE/4_bug_report.md .github/ISSUE_TEMPLATE/5_feature_request.md .github/ISSUE_TEMPLATE/6_question.md docs/supportedsites.md yt_dlp/version.py Changelog.md
git commit $gpg_sign_commits -m "release $version"
/bin/echo -e "\n### Now tagging, signing and pushing..."
git tag -s -m "Release $version" "$version"
git show "$version"
read -p "Is it good, can I push? (y/n) " -n 1
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi
echo
MASTER=$(git rev-parse --abbrev-ref HEAD)
git push origin $MASTER:master
git push origin "$version"
/bin/echo -e "\n### OK, now it is time to build the binaries..."
REV=$(git rev-parse HEAD)
make yt-dlp yt-dlp.tar.gz
read -p "VM running? (y/n) " -n 1
wget "http://$buildserver/build/ytdl-org/youtube-dl/yt-dlp.exe?rev=$REV" -O yt-dlp.exe
mkdir -p "build/$version"
mv yt-dlp yt-dlp.exe "build/$version"
mv yt-dlp.tar.gz "build/$version/yt-dlp-$version.tar.gz"
RELEASE_FILES="yt-dlp yt-dlp.exe yt-dlp-$version.tar.gz"
(cd build/$version/ && md5sum $RELEASE_FILES > MD5SUMS)
(cd build/$version/ && sha1sum $RELEASE_FILES > SHA1SUMS)
(cd build/$version/ && sha256sum $RELEASE_FILES > SHA2-256SUMS)
(cd build/$version/ && sha512sum $RELEASE_FILES > SHA2-512SUMS)
/bin/echo -e "\n### Signing and uploading the new binaries to GitHub..."
for f in $RELEASE_FILES; do gpg --passphrase-repeat 5 --detach-sig "build/$version/$f"; done
ROOT=$(pwd)
python devscripts/create-github-release.py Changelog.md $version "$ROOT/build/$version"
ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"
/bin/echo -e "\n### Now switching to gh-pages..."
git clone --branch gh-pages --single-branch . build/gh-pages
(
set -e
ORIGIN_URL=$(git config --get remote.origin.url)
cd build/gh-pages
"$ROOT/devscripts/gh-pages/add-version.py" $version
"$ROOT/devscripts/gh-pages/update-feed.py"
"$ROOT/devscripts/gh-pages/sign-versions.py" < "$ROOT/updates_key.pem"
"$ROOT/devscripts/gh-pages/generate-download.py"
"$ROOT/devscripts/gh-pages/update-copyright.py"
"$ROOT/devscripts/gh-pages/update-sites.py"
git add *.html *.html.in update
git commit $gpg_sign_commits -m "release $version"
git push "$ROOT" gh-pages
git push "$ORIGIN_URL" gh-pages
)
rm -rf build
make pypi-files
echo "Uploading to PyPi ..."
python setup.py sdist bdist_wheel upload
make clean
/bin/echo -e "\n### DONE!"

View File

@@ -13,5 +13,4 @@ if ["%~1"]==[""] (
exit /b 1
)
set PYTHONWARNINGS=error
pytest %test_set%

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env sh
#!/bin/sh
if [ -z $1 ]; then
test_set='test'
@@ -11,4 +11,4 @@ else
exit 1
fi
python3 -bb -Werror -m pytest "$test_set"
python3 -m pytest "$test_set"

View File

@@ -0,0 +1,49 @@
# Unused
#!/usr/bin/env python3
from __future__ import unicode_literals
import itertools
import json
import os
import re
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yt_dlp.compat import (
compat_print,
compat_urllib_request,
)
from yt_dlp.utils import format_bytes
def format_size(bytes):
return '%s (%d bytes)' % (format_bytes(bytes), bytes)
total_bytes = 0
for page in itertools.count(1):
releases = json.loads(compat_urllib_request.urlopen(
'https://api.github.com/repos/ytdl-org/youtube-dl/releases?page=%s' % page
).read().decode('utf-8'))
if not releases:
break
for release in releases:
compat_print(release['name'])
for asset in release['assets']:
asset_name = asset['name']
total_bytes += asset['download_count'] * asset['size']
if all(not re.match(p, asset_name) for p in (
r'^yt-dlp$',
r'^yt-dlp-\d{4}\.\d{2}\.\d{2}(?:\.\d+)?\.tar\.gz$',
r'^yt-dlp\.exe$')):
continue
compat_print(
' %s size: %s downloads: %d'
% (asset_name, format_size(asset['size']), asset['download_count']))
compat_print('total downloads traffic: %s' % format_size(total_bytes))

View File

@@ -1,4 +1,6 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import json
import os
import re
@@ -8,6 +10,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yt_dlp.compat import compat_urllib_request
# usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
# version can be either 0-aligned (yt-dlp version) or normalized (PyPI version)
@@ -17,14 +20,14 @@ normalized_version = '.'.join(str(int(x)) for x in version.split('.'))
pypi_release = json.loads(compat_urllib_request.urlopen(
'https://pypi.org/pypi/yt-dlp/%s/json' % normalized_version
).read().decode())
).read().decode('utf-8'))
tarball_file = next(x for x in pypi_release['urls'] if x['filename'].endswith('.tar.gz'))
sha256sum = tarball_file['digests']['sha256']
url = tarball_file['url']
with open(filename) as r:
with open(filename, 'r') as r:
formulae_text = r.read()
formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text)
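
For example, the normalization near the top of this script simply strips leading zeroes from each date component (illustrative value):
# Illustrative: mapping the 0-aligned yt-dlp version to the normalized PyPI version.
version = '2021.01.09'
normalized_version = '.'.join(str(int(x)) for x in version.split('.'))
print(normalized_version)  # '2021.1.9'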

View File

@@ -1,9 +1,10 @@
#!/usr/bin/env python3
import subprocess
import sys
from datetime import datetime
import sys
import subprocess
with open('yt_dlp/version.py') as f:
with open('yt_dlp/version.py', 'rt') as f:
exec(compile(f.read(), 'yt_dlp/version.py', 'exec'))
old_version = locals()['__version__']

58
devscripts/wine-py2exe.sh Executable file
View File

@@ -0,0 +1,58 @@
# UNUSED
#!/bin/bash
# Run with a setup.py that works in the current directory as the parameter
# e.g. no os.chdir()
# It will run twice, the first time will crash
set -e
SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
if [ ! -d wine-py2exe ]; then
sudo apt-get install wine1.3 axel bsdiff
mkdir wine-py2exe
cd wine-py2exe
export WINEPREFIX=`pwd`
axel -a "http://www.python.org/ftp/python/2.7/python-2.7.msi"
axel -a "http://downloads.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.win32-py2.7.exe"
#axel -a "http://winetricks.org/winetricks"
# http://appdb.winehq.org/objectManager.php?sClass=version&iId=21957
echo "Follow python setup on screen"
wine msiexec /i python-2.7.msi
echo "Follow py2exe setup on screen"
wine py2exe-0.6.9.win32-py2.7.exe
#echo "Follow Microsoft Visual C++ 2008 Redistributable Package setup on screen"
#bash winetricks vcrun2008
rm py2exe-0.6.9.win32-py2.7.exe
rm python-2.7.msi
#rm winetricks
# http://bugs.winehq.org/show_bug.cgi?id=3591
mv drive_c/Python27/Lib/site-packages/py2exe/run.exe drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup
bspatch drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run.exe "$SCRIPT_DIR/SizeOfImage.patch"
mv drive_c/Python27/Lib/site-packages/py2exe/run_w.exe drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup
bspatch drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run_w.exe "$SCRIPT_DIR/SizeOfImage_w.patch"
cd -
else
export WINEPREFIX="$( cd wine-py2exe && pwd )"
fi
wine "C:\\Python27\\python.exe" "$1" py2exe > "py2exe.log" 2>&1 || true
echo '# Copying python27.dll' >> "py2exe.log"
cp "$WINEPREFIX/drive_c/windows/system32/python27.dll" build/bdist.win32/winexe/bundle-2.7/
wine "C:\\Python27\\python.exe" "$1" py2exe >> "py2exe.log" 2>&1

View File

@@ -1,9 +1,11 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import os
from os.path import dirname as dirn
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
import yt_dlp
ZSH_COMPLETION_FILE = "completions/zsh/_yt-dlp"
@@ -43,5 +45,5 @@ def build_completion(opt_parser):
f.write(template)
parser = yt_dlp.parseOpts(ignore_config_files=True)[0]
parser = yt_dlp.parseOpts()[0]
build_completion(parser)

1
docs/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
_build/

5
docs/Changelog.md Normal file
View File

@@ -0,0 +1,5 @@
---
orphan: true
---
```{include} ../Changelog.md
```

5
docs/Collaborators.md Normal file
View File

@@ -0,0 +1,5 @@
---
orphan: true
---
```{include} ../Collaborators.md
```

5
docs/Contributing.md Normal file
View File

@@ -0,0 +1,5 @@
---
orphan: true
---
```{include} ../Contributing.md
```

6
docs/LICENSE.md Normal file
View File

@@ -0,0 +1,6 @@
---
orphan: true
---
# LICENSE
```{include} ../LICENSE
```

177
docs/Makefile Normal file
View File

@@ -0,0 +1,177 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/yt-dlp.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/yt-dlp.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/yt-dlp"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/yt-dlp"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

2
docs/README.md Normal file
View File

@@ -0,0 +1,2 @@
```{include} ../README.md
```

68
docs/conf.py Normal file
View File

@@ -0,0 +1,68 @@
# coding: utf-8
#
# yt-dlp documentation build configuration file
import sys
import os
# Allow importing yt-dlp
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'myst_parser',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The master toctree document.
master_doc = 'README'
# General information about the project.
project = u'yt-dlp'
author = u'yt-dlp'
copyright = u'UNLICENSE'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
from yt_dlp.version import __version__
version = __version__
# The full version, including alpha/beta/rc tags.
release = version
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Disable highlights
highlight_language = 'none'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Enable heading anchors
myst_heading_anchors = 4
# Suppress heading warnings
suppress_warnings = [
'myst.header',
]

1
docs/requirements.txt Normal file
View File

@@ -0,0 +1 @@
myst-parser

5
docs/supportedsites.md Normal file
View File

@@ -0,0 +1,5 @@
---
orphan: true
---
```{include} ../supportedsites.md
```

6
docs/ytdlp_plugins.md Normal file
View File

@@ -0,0 +1,6 @@
---
orphan: true
---
# ytdlp_plugins
See [https://github.com/yt-dlp/yt-dlp/tree/master/ytdlp_plugins](https://github.com/yt-dlp/yt-dlp/tree/master/ytdlp_plugins).

View File

@@ -1,22 +1,33 @@
#!/usr/bin/env python3
# coding: utf-8
import os
import platform
import sys
from PyInstaller.utils.hooks import collect_submodules
from PyInstaller.__main__ import run as run_pyinstaller
OS_NAME, ARCH = sys.platform, platform.architecture()[0][:2]
OS_NAME = platform.system()
if OS_NAME == 'Windows':
from PyInstaller.utils.win32.versioninfo import (
VarStruct, VarFileInfo, StringStruct, StringTable,
StringFileInfo, FixedFileInfo, VSVersionInfo, SetVersion,
)
elif OS_NAME == 'Darwin':
pass
else:
raise Exception('{OS_NAME} is not supported')
ARCH = platform.architecture()[0][:2]
def main():
opts = parse_options()
version = read_version('yt_dlp/version.py')
version = read_version()
onedir = '--onedir' in opts or '-D' in opts
if not onedir and '-F' not in opts and '--onefile' not in opts:
opts.append('--onefile')
suffix = '_macos' if OS_NAME == 'Darwin' else '_x86' if ARCH == '32' else ''
final_file = 'dist/%syt-dlp%s%s' % (
'yt-dlp/' if '--onedir' in opts else '', suffix, '.exe' if OS_NAME == 'Windows' else '')
name, final_file = exe(onedir)
print(f'Building yt-dlp v{version} {ARCH}bit for {OS_NAME} with options {opts}')
print('Remember to update the version using "devscripts/update-version.py"')
if not os.path.isfile('yt_dlp/extractor/lazy_extractors.py'):
@@ -25,20 +36,20 @@ def main():
print(f'Destination: {final_file}\n')
opts = [
f'--name={name}',
f'--name=yt-dlp{suffix}',
'--icon=devscripts/logo.ico',
'--upx-exclude=vcruntime140.dll',
'--noconfirm',
# NB: Modules that are only imported dynamically must be added here.
        # --collect-submodules may not work correctly if the user has yt-dlp installed via pip
'--hidden-import=yt_dlp.compat._legacy',
*dependency_options(),
*opts,
'yt_dlp/__main__.py',
]
print(f'Running PyInstaller with {opts}')
run_pyinstaller(opts)
import PyInstaller.__main__
PyInstaller.__main__.run(opts)
set_version_info(final_file, version)
@@ -49,29 +60,12 @@ def parse_options():
if ARCH != opts[0]:
raise Exception(f'{opts[0]}bit executable cannot be built on a {ARCH}bit system')
opts = opts[1:]
return opts
return opts or ['--onefile']
# Get the version from yt_dlp/version.py without importing the package
def read_version(fname):
with open(fname, encoding='utf-8') as f:
exec(compile(f.read(), fname, 'exec'))
return locals()['__version__']
def exe(onedir):
"""@returns (name, path)"""
name = '_'.join(filter(None, (
'yt-dlp',
{'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
ARCH == '32' and 'x86'
)))
return name, ''.join(filter(None, (
'dist/',
onedir and f'{name}/',
name,
OS_NAME == 'win32' and '.exe'
)))
def read_version():
exec(compile(open('yt_dlp/version.py').read(), 'yt_dlp/version.py', 'exec'))
return locals()['__version__']
def version_to_list(version):
@@ -80,12 +74,10 @@ def version_to_list(version):
def dependency_options():
# Due to the current implementation, these are auto-detected, but explicitly add them just in case
dependencies = [pycryptodome_module(), 'mutagen', 'brotli', 'certifi', 'websockets']
excluded_modules = ['test', 'ytdlp_plugins', 'youtube_dl', 'youtube_dlc']
dependencies = [pycryptodome_module(), 'mutagen', 'brotli'] + collect_submodules('websockets')
excluded_modules = ['test', 'ytdlp_plugins', 'youtube-dl', 'youtube-dlc']
yield from (f'--hidden-import={module}' for module in dependencies)
yield '--collect-submodules=websockets'
yield from (f'--exclude-module={module}' for module in excluded_modules)
@@ -104,22 +96,11 @@ def pycryptodome_module():
def set_version_info(exe, version):
if OS_NAME == 'win32':
if OS_NAME == 'Windows':
windows_set_version(exe, version)
def windows_set_version(exe, version):
from PyInstaller.utils.win32.versioninfo import (
FixedFileInfo,
SetVersion,
StringFileInfo,
StringStruct,
StringTable,
VarFileInfo,
VarStruct,
VSVersionInfo,
)
version_list = version_to_list(version)
suffix = '_x86' if ARCH == '32' else ''
SetVersion(exe, VSVersionInfo(

4
pytest.ini Normal file
View File

@@ -0,0 +1,4 @@
[pytest]
addopts = -ra -v --strict-markers
markers =
download

View File

@@ -2,5 +2,4 @@ mutagen
pycryptodomex
websockets
brotli; platform_python_implementation=='CPython'
brotlicffi; platform_python_implementation!='CPython'
certifi
brotlicffi; platform_python_implementation!='CPython'

View File

@@ -1,39 +1,6 @@
[wheel]
universal = true
universal = True
[flake8]
exclude = build,venv,.tox,.git,.pytest_cache
ignore = E402,E501,E731,E741,W503
max_line_length = 120
per_file_ignores =
devscripts/lazy_load_template.py: F401
[tool:pytest]
addopts = -ra -v --strict-markers
markers =
download
[tox:tox]
skipsdist = true
envlist = py{36,37,38,39,310},pypy{36,37,38,39}
skip_missing_interpreters = true
[testenv] # tox
deps =
pytest
commands = pytest {posargs:"-m not download"}
passenv = HOME # For test_compat_expanduser
setenv =
# PYTHONWARNINGS = error # Catches PIP's warnings too
[isort]
py_version = 36
multi_line_output = VERTICAL_HANGING_INDENT
line_length = 80
reverse_relative = true
ensure_newline_before_comments = true
include_trailing_comma = true
exclude = yt_dlp/extractor/__init__.py,devscripts/buildserver.py,devscripts/lazy_load_template.py,devscripts/make_issue_template.py,setup.py,build,.git,venv,devscripts/create-github-release.py,devscripts/release.sh,devscripts/show-downloads-statistics.py
ignore = E402,E501,E731,E741,W503

View File

@@ -1,42 +1,33 @@
#!/usr/bin/env python3
# coding: utf-8
import os.path
import sys
import warnings
import sys
try:
from setuptools import Command, find_packages, setup
from setuptools import setup, Command, find_packages
setuptools_available = True
except ImportError:
from distutils.core import Command, setup
from distutils.core import setup, Command
setuptools_available = False
from distutils.spawn import spawn
def read(fname):
with open(fname, encoding='utf-8') as f:
return f.read()
# Get the version from yt_dlp/version.py without importing the package
def read_version(fname):
exec(compile(read(fname), fname, 'exec'))
return locals()['__version__']
exec(compile(open('yt_dlp/version.py').read(), 'yt_dlp/version.py', 'exec'))
VERSION = read_version('yt_dlp/version.py')
DESCRIPTION = 'A youtube-dl fork with additional features and patches'
LONG_DESCRIPTION = '\n\n'.join((
'Official repository: <https://github.com/yt-dlp/yt-dlp>',
'**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
read('README.md')))
open('README.md').read()))
REQUIREMENTS = read('requirements.txt').splitlines()
REQUIREMENTS = open('requirements.txt').read().splitlines()
if sys.argv[1:2] == ['py2exe']:
import py2exe # noqa: F401
import py2exe
warnings.warn(
        'py2exe builds do not support pycryptodomex and need VC++14 to run. '
'The recommended way is to use "pyinst.py" to build using pyinstaller')
@@ -44,11 +35,11 @@ if sys.argv[1:2] == ['py2exe']:
'console': [{
'script': './yt_dlp/__main__.py',
'dest_base': 'yt-dlp',
'version': VERSION,
'version': __version__,
'description': DESCRIPTION,
'comments': LONG_DESCRIPTION.split('\n')[0],
'product_name': 'yt-dlp',
'product_version': VERSION,
'product_version': __version__,
}],
'options': {
'py2exe': {
@@ -58,8 +49,6 @@ if sys.argv[1:2] == ['py2exe']:
'dist_dir': './dist',
'excludes': ['Crypto', 'Cryptodome'], # py2exe cannot import Crypto
'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
# Modules that are only imported dynamically must be added here
'includes': ['yt_dlp.compat._legacy'],
}
},
'zipfile': None
@@ -117,7 +106,7 @@ else:
setup(
name='yt-dlp',
version=VERSION,
version=__version__,
maintainer='pukkandan',
maintainer_email='pukkandan.ytdlp@gmail.com',
description=DESCRIPTION,
@@ -127,7 +116,7 @@ setup(
packages=packages,
install_requires=REQUIREMENTS,
project_urls={
'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
'Documentation': 'https://yt-dlp.readthedocs.io',
'Source': 'https://github.com/yt-dlp/yt-dlp',
'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
@@ -140,9 +129,6 @@ setup(
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: Implementation',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',

View File

@@ -1,6 +1,4 @@
# Supported sites
- **0000studio:archive**
- **0000studio:clip**
- **17live**
- **17live:clip**
- **1tv**: Первый канал
@@ -26,12 +24,12 @@
- **abcnews:video**
- **abcotvs**: ABC Owned Television Stations
- **abcotvs:clips**
- **AbemaTV**: [<abbr title="netrc machine"><em>abematv</em></abbr>]
- **AbemaTV**
- **AbemaTVTitle**
- **AcademicEarth:Course**
- **acast**
- **acast:channel**
- **ADN**: [<abbr title="netrc machine"><em>animedigitalnetwork</em></abbr>] Anime Digital Network
- **ADN**: Anime Digital Network
- **AdobeConnect**
- **adobetv**
- **adobetv:channel**
@@ -42,9 +40,8 @@
- **aenetworks**: A+E Networks: A&E, Lifetime, History.com, FYI Network and History Vault
- **aenetworks:collection**
- **aenetworks:show**
- **afreecatv**: [<abbr title="netrc machine"><em>afreecatv</em></abbr>] afreecatv.com
- **afreecatv:live**: [<abbr title="netrc machine"><em>afreecatv</em></abbr>] afreecatv.com
- **afreecatv:user**
- **afreecatv**: afreecatv.com
- **afreecatv:live**: afreecatv.com
- **AirMozilla**
- **AliExpressLive**
- **AlJazeera**
@@ -52,8 +49,8 @@
- **AlphaPorno**
- **Alsace20TV**
- **Alsace20TVEmbed**
- **Alura**: [<abbr title="netrc machine"><em>alura</em></abbr>]
- **AluraCourse**: [<abbr title="netrc machine"><em>aluracourse</em></abbr>]
- **Alura**
- **AluraCourse**
- **Amara**
- **AmazonStore**
- **AMCNetworks**
@@ -62,7 +59,9 @@
- **AmHistoryChannel**
- **anderetijden**: npo.nl, ntr.nl, omroepwnl.nl, zapp.nl and npo3.nl
- **AnimalPlanet**
- **AnimeOnDemand**: [<abbr title="netrc machine"><em>animeondemand</em></abbr>]
- **AnimeLab**
- **AnimeLabShows**
- **AnimeOnDemand**
- **ant1newsgr:article**: ant1news.gr articles
- **ant1newsgr:embed**: ant1news.gr embedded videos
- **ant1newsgr:watch**: ant1news.gr videos
@@ -88,8 +87,7 @@
- **ArteTVPlaylist**
- **AsianCrush**
- **AsianCrushPlaylist**
- **AtresPlayer**: [<abbr title="netrc machine"><em>atresplayer</em></abbr>]
- **AtScaleConfEvent**
- **AtresPlayer**
- **ATTTechChannel**
- **ATVAt**
- **AudiMedia**
@@ -106,8 +104,6 @@
- **awaan:video**
- **AZMedien**: AZ Medien videos
- **BaiduVideo**: 百度视频
- **BanBye**
- **BanByeChannel**
- **bandaichannel**
- **Bandcamp**
- **Bandcamp:album**
@@ -115,13 +111,13 @@
- **Bandcamp:weekly**
- **bangumi.bilibili.com**: BiliBili番剧
- **BannedVideo**
- **bbc**: [<abbr title="netrc machine"><em>bbc</em></abbr>] BBC
- **bbc.co.uk**: [<abbr title="netrc machine"><em>bbc</em></abbr>] BBC iPlayer
- **bbc**: BBC
- **bbc.co.uk**: BBC iPlayer
- **bbc.co.uk:article**: BBC articles
- **bbc.co.uk:iplayer:episodes**
- **bbc.co.uk:iplayer:group**
- **bbc.co.uk:playlist**
- **BBVTV**: [<abbr title="netrc machine"><em>bbvtv</em></abbr>]
- **BBVTV**
- **Beatport**
- **Beeg**
- **BehindKink**
@@ -143,9 +139,8 @@
- **BilibiliChannel**
- **BiliBiliPlayer**
- **BiliBiliSearch**: Bilibili video search; "bilisearch:" prefix
- **BiliIntl**: [<abbr title="netrc machine"><em>biliintl</em></abbr>]
- **BiliIntlSeries**: [<abbr title="netrc machine"><em>biliintl</em></abbr>]
- **BiliLive**
- **BiliIntl**
- **BiliIntlSeries**
- **BioBioChileTV**
- **Biography**
- **BIQLE**
@@ -250,14 +245,13 @@
- **cpac:playlist**
- **Cracked**
- **Crackle**
- **Craftsy**
- **CrooksAndLiars**
- **CrowdBunker**
- **CrowdBunkerChannel**
- **crunchyroll**: [<abbr title="netrc machine"><em>crunchyroll</em></abbr>]
- **crunchyroll:beta**: [<abbr title="netrc machine"><em>crunchyroll</em></abbr>]
- **crunchyroll:playlist**: [<abbr title="netrc machine"><em>crunchyroll</em></abbr>]
- **crunchyroll:playlist:beta**: [<abbr title="netrc machine"><em>crunchyroll</em></abbr>]
- **crunchyroll**
- **crunchyroll:beta**
- **crunchyroll:playlist**
- **crunchyroll:playlist:beta**
- **CSpan**: C-SPAN
- **CSpanCongress**
- **CtsNews**: 華視新聞
@@ -265,20 +259,16 @@
- **CTVNews**
- **cu.ntv.co.jp**: Nippon Television Network
- **CultureUnplugged**
- **curiositystream**: [<abbr title="netrc machine"><em>curiositystream</em></abbr>]
- **curiositystream:collections**: [<abbr title="netrc machine"><em>curiositystream</em></abbr>]
- **curiositystream:series**: [<abbr title="netrc machine"><em>curiositystream</em></abbr>]
- **curiositystream**
- **curiositystream:collections**
- **curiositystream:series**
- **CWTV**
- **Cybrary**: [<abbr title="netrc machine"><em>cybrary</em></abbr>]
- **CybraryCourse**: [<abbr title="netrc machine"><em>cybrary</em></abbr>]
- **Daftsex**
- **DagelijkseKost**: dagelijksekost.een.be
- **DailyMail**
- **dailymotion**: [<abbr title="netrc machine"><em>dailymotion</em></abbr>]
- **dailymotion:playlist**: [<abbr title="netrc machine"><em>dailymotion</em></abbr>]
- **dailymotion:user**: [<abbr title="netrc machine"><em>dailymotion</em></abbr>]
- **DailyWire**
- **DailyWirePodcast**
- **dailymotion**
- **dailymotion:playlist**
- **dailymotion:user**
- **damtomo:record**
- **damtomo:video**
- **daum.net**
@@ -295,7 +285,7 @@
- **DestinationAmerica**
- **DHM**: Filmarchiv - Deutsches Historisches Museum
- **Digg**
- **DigitalConcertHall**: [<abbr title="netrc machine"><em>digitalconcerthall</em></abbr>] DigitalConcertHall extractor
- **DigitalConcertHall**: DigitalConcertHall extractor
- **DigitallySpeaking**
- **Digiteka**
- **Discovery**
@@ -319,14 +309,14 @@
- **DRBonanza**
- **Drooble**
- **Dropbox**
- **Dropout**: [<abbr title="netrc machine"><em>dropout</em></abbr>]
- **Dropout**
- **DropoutSeason**
- **DrTuber**
- **drtv**
- **drtv:live**
- **DTube**
- **duboku**: www.duboku.io
- **duboku:list**: www.duboku.io entire series
- **duboku**: www.duboku.co
- **duboku:list**: www.duboku.co entire series
- **Dumpert**
- **dvtv**: http://video.aktualne.cz/
- **dw**
@@ -338,7 +328,7 @@
- **egghead:lesson**: egghead.io lesson
- **ehftv**
- **eHow**
- **EinsUndEinsTV**: [<abbr title="netrc machine"><em>1und1tv</em></abbr>]
- **EinsUndEinsTV**
- **Einthusan**
- **eitb.tv**
- **EllenTube**
@@ -352,7 +342,7 @@
- **Epicon**
- **EpiconSeries**
- **Eporner**
- **EroProfile**: [<abbr title="netrc machine"><em>eroprofile</em></abbr>]
- **EroProfile**
- **EroProfile:album**
- **ertflix**: ERTFLIX videos
- **ertflix:codename**: ERTFLIX videos by codename
@@ -365,21 +355,20 @@
- **Europa**
- **EuropeanTour**
- **EUScreen**
- **EWETV**: [<abbr title="netrc machine"><em>ewetv</em></abbr>]
- **EWETV**
- **ExpoTV**
- **Expressen**
- **ExtremeTube**
- **EyedoTV**
- **facebook**: [<abbr title="netrc machine"><em>facebook</em></abbr>]
- **facebook**
- **FacebookPluginsVideo**
- **fancode:live**: [<abbr title="netrc machine"><em>fancode</em></abbr>]
- **fancode:vod**: [<abbr title="netrc machine"><em>fancode</em></abbr>]
- **fancode:live**
- **fancode:vod**
- **faz.net**
- **fc2**: [<abbr title="netrc machine"><em>fc2</em></abbr>]
- **fc2**
- **fc2:embed**
- **fc2:live**
- **Fczenit**
- **Fifa**
- **Filmmodu**
- **filmon**
- **filmon:channel**
@@ -406,21 +395,19 @@
- **FranceTVSite**
- **Freesound**
- **freespeech.org**
- **freetv:series**
- **FreeTvMovies**
- **FrontendMasters**: [<abbr title="netrc machine"><em>frontendmasters</em></abbr>]
- **FrontendMastersCourse**: [<abbr title="netrc machine"><em>frontendmasters</em></abbr>]
- **FrontendMastersLesson**: [<abbr title="netrc machine"><em>frontendmasters</em></abbr>]
- **FrontendMasters**
- **FrontendMastersCourse**
- **FrontendMastersLesson**
- **FujiTVFODPlus7**
- **Funimation**: [<abbr title="netrc machine"><em>funimation</em></abbr>]
- **funimation:page**: [<abbr title="netrc machine"><em>funimation</em></abbr>]
- **funimation:show**: [<abbr title="netrc machine"><em>funimation</em></abbr>]
- **Funimation**
- **funimation:page**
- **funimation:show**
- **Funk**
- **Fusion**
- **Fux**
- **Gab**
- **GabTV**
- **Gaia**: [<abbr title="netrc machine"><em>gaia</em></abbr>]
- **Gaia**
- **GameInformer**
- **GameJolt**
- **GameJoltCommunity**
@@ -432,19 +419,20 @@
- **GameStar**
- **Gaskrank**
- **Gazeta**
- **GDCVault**: [<abbr title="netrc machine"><em>gdcvault</em></abbr>]
- **GDCVault**
- **GediDigital**
- **gem.cbc.ca**: [<abbr title="netrc machine"><em>cbcgem</em></abbr>]
- **gem.cbc.ca**
- **gem.cbc.ca:live**
- **gem.cbc.ca:playlist**
- **generic**: Generic downloader that works on some sites
- **Gettr**
- **GettrStreaming**
- **Gfycat**
- **GiantBomb**
- **Giga**
- **GlattvisionTV**: [<abbr title="netrc machine"><em>glattvisiontv</em></abbr>]
- **GlattvisionTV**
- **Glide**: Glide mobile video messages (glide.me)
- **Globo**: [<abbr title="netrc machine"><em>globo</em></abbr>]
- **Globo**
- **GloboArticle**
- **glomex**: Glomex videos
- **glomex:embed**: Glomex embedded videos
@@ -453,18 +441,14 @@
- **GodTube**
- **Gofile**
- **Golem**
- **goodgame:stream**
- **google:podcasts**
- **google:podcasts:feed**
- **GoogleDrive**
- **GoogleDrive:Folder**
- **GoPro**
- **Goshgay**
- **GoToStage**
- **GPUTechConf**
- **Gronkh**
- **gronkh:feed**
- **gronkh:vods**
- **Groupon**
- **hbo**
- **HearThisAt**
@@ -476,7 +460,7 @@
- **hgtv.com:show**
- **HGTVDe**
- **HGTVUsa**
- **HiDive**: [<abbr title="netrc machine"><em>hidive</em></abbr>]
- **HiDive**
- **HistoricFilms**
- **history:player**
- **history:topic**: History.com Topic
@@ -491,8 +475,8 @@
- **Howcast**
- **HowStuffWorks**
- **hrfernsehen**
- **HRTi**: [<abbr title="netrc machine"><em>hrti</em></abbr>]
- **HRTiPlaylist**: [<abbr title="netrc machine"><em>hrti</em></abbr>]
- **HRTi**
- **HRTiPlaylist**
- **HSEProduct**
- **HSEShow**
- **Huajiao**: 花椒直播
@@ -500,9 +484,7 @@
- **Hungama**
- **HungamaAlbumPlaylist**
- **HungamaSong**
- **huya:live**: huya.com
- **Hypem**
- **Icareus**
- **ign.com**
- **IGNArticle**
- **IGNVideo**
@@ -517,21 +499,19 @@
- **Inc**
- **IndavideoEmbed**
- **InfoQ**
- **Instagram**: [<abbr title="netrc machine"><em>instagram</em></abbr>]
- **instagram:story**: [<abbr title="netrc machine"><em>instagram</em></abbr>]
- **instagram:tag**: [<abbr title="netrc machine"><em>instagram</em></abbr>] Instagram hashtag search URLs
- **instagram:user**: [<abbr title="netrc machine"><em>instagram</em></abbr>] Instagram user profile
- **Instagram**
- **instagram:story**
- **instagram:tag**: Instagram hashtag search URLs
- **instagram:user**: Instagram user profile
- **InstagramIOS**: IOS instagram:// URL
- **Internazionale**
- **InternetVideoArchive**
- **InvestigationDiscovery**
- **IPrima**: [<abbr title="netrc machine"><em>iprima</em></abbr>]
- **IPrima**
- **IPrimaCNN**
- **iq.com**: International version of iQiyi
- **iq.com:album**
- **iqiyi**: [<abbr title="netrc machine"><em>iqiyi</em></abbr>] 爱奇艺
- **ITProTV**
- **ITProTVCourse**
- **iqiyi**: 爱奇艺
- **ITTF**
- **ITV**
- **ITVBTCC**
@@ -539,12 +519,7 @@
- **ivi:compilation**: ivi.ru compilations
- **ivideon**: Ivideon TV
- **Iwara**
- **iwara:playlist**
- **iwara:user**
- **Ixigua**
- **Izlesene**
- **Jable**
- **JablePlaylist**
- **Jamendo**
- **JamendoAlbum**
- **JeuxVideo**
@@ -561,14 +536,12 @@
- **Ketnet**
- **khanacademy**
- **khanacademy:unit**
- **Kicker**
- **KickStarter**
- **KinjaEmbed**
- **KinoPoisk**
- **KonserthusetPlay**
- **Koo**
- **KrasView**: Красвью
- **KTH**
- **Ku6**
- **KUSI**
- **kuwo:album**: 酷我音乐 - 专辑
@@ -582,9 +555,6 @@
- **la7.it:podcast**
- **laola1tv**
- **laola1tv:embed**
- **LastFM**
- **LastFMPlaylist**
- **LastFMUser**
- **lbry**
- **lbry:channel**
- **LCI**
@@ -592,9 +562,9 @@
- **LcpPlay**
- **Le**: 乐视网
- **Lecture2Go**
- **Lecturio**: [<abbr title="netrc machine"><em>lecturio</em></abbr>]
- **LecturioCourse**: [<abbr title="netrc machine"><em>lecturio</em></abbr>]
- **LecturioDeCourse**: [<abbr title="netrc machine"><em>lecturio</em></abbr>]
- **Lecturio**
- **LecturioCourse**
- **LecturioDeCourse**
- **LEGO**
- **Lemonde**
- **Lenta**
@@ -603,17 +573,15 @@
- **Libsyn**
- **life**: Life.ru
- **life:embed**
- **likee**
- **likee:user**
- **limelight**
- **limelight:channel**
- **limelight:channel_list**
- **LineLive**
- **LineLiveChannel**
- **LinkedIn**: [<abbr title="netrc machine"><em>linkedin</em></abbr>]
- **linkedin:learning**: [<abbr title="netrc machine"><em>linkedin</em></abbr>]
- **linkedin:learning:course**: [<abbr title="netrc machine"><em>linkedin</em></abbr>]
- **LinuxAcademy**: [<abbr title="netrc machine"><em>linuxacademy</em></abbr>]
- **LinkedIn**
- **linkedin:learning**
- **linkedin:learning:course**
- **LinuxAcademy**
- **LiTV**
- **LiveJournal**
- **livestream**
@@ -623,10 +591,9 @@
- **loc**: Library of Congress
- **LocalNews8**
- **LoveHomePorn**
- **LRTStream**
- **LRTVOD**
- **lynda**: [<abbr title="netrc machine"><em>lynda</em></abbr>] lynda.com videos
- **lynda:course**: [<abbr title="netrc machine"><em>lynda</em></abbr>] lynda.com online courses
- **lrt.lt**
- **lynda**: lynda.com videos
- **lynda:course**: lynda.com online courses
- **m6**
- **MagentaMusik360**
- **mailru**: Видео@Mail.Ru
@@ -636,7 +603,6 @@
- **MallTV**
- **mangomolo:live**
- **mangomolo:video**
- **MangoTV**: 芒果TV
- **ManotoTV**: Manoto TV (Episode)
- **ManotoTVLive**: Manoto TV (Live)
- **ManotoTVShow**: Manoto TV (Show)
@@ -645,7 +611,6 @@
- **Markiza**
- **MarkizaPage**
- **massengeschmack.tv**
- **Masters**
- **MatchTV**
- **MDR**: MDR.DE and KiKA
- **MedalTV**
@@ -670,6 +635,7 @@
- **Metacritic**
- **mewatch**
- **Mgoon**
- **MGTV**: 芒果TV
- **MiaoPai**
- **microsoftstream**: Microsoft Stream
- **mildom**: Record ongoing live by specific user in Mildom
@@ -684,7 +650,6 @@
- **miomio.tv**
- **mirrativ**
- **mirrativ:user**
- **MirrorCoUK**
- **MiTele**: mitele.es
- **mixch**
- **mixch:archive**
@@ -695,7 +660,7 @@
- **MLBVideo**
- **MLSSoccer**
- **Mnet**
- **MNetTV**: [<abbr title="netrc machine"><em>mnettv</em></abbr>]
- **MNetTV**
- **MoeVideo**: LetitBit video services: moevideo.net, playreplay.net and videochart.net
- **Mofosex**
- **MofosexEmbed**
@@ -706,7 +671,6 @@
- **Motorsport**: motorsport.com
- **MovieClips**
- **MovieFap**
- **Moviepilot**
- **Moviezine**
- **MovingImage**
- **MSN**
@@ -741,6 +705,7 @@
- **MyVideoGe**
- **MyVidster**
- **MyviEmbed**
- **MyVisionTV**
- **n-tv.de**
- **N1Info:article**
- **N1InfoAsset**
@@ -750,7 +715,6 @@
- **NationalGeographicTV**
- **Naver**
- **Naver:live**
- **navernow**
- **NBA**
- **nba:watch**
- **nba:watch:collection**
@@ -768,9 +732,8 @@
- **ndr:embed**
- **ndr:embed:base**
- **NDTV**
- **Nebula**: [<abbr title="netrc machine"><em>watchnebula</em></abbr>]
- **nebula:channel**: [<abbr title="netrc machine"><em>watchnebula</em></abbr>]
- **nebula:subscriptions**: [<abbr title="netrc machine"><em>watchnebula</em></abbr>]
- **Nebula**
- **nebula:collection**
- **NerdCubedFeed**
- **netease:album**: 网易云音乐 - 专辑
- **netease:djradio**: 网易云音乐 - 电台
@@ -779,9 +742,7 @@
- **netease:program**: 网易云音乐 - 电台节目
- **netease:singer**: 网易云音乐 - 歌手
- **netease:song**: 网易云音乐
- **NetPlus**: [<abbr title="netrc machine"><em>netplus</em></abbr>]
- **Netverse**
- **NetversePlaylist**
- **NetPlus**
- **Netzkino**
- **Newgrounds**
- **Newgrounds:playlist**
@@ -795,8 +756,8 @@
- **NexxEmbed**
- **NFB**
- **NFHSNetwork**
- **nfl.com**: (**Currently broken**)
- **nfl.com:article**: (**Currently broken**)
- **nfl.com** (Currently broken)
- **nfl.com:article** (Currently broken)
- **NhkForSchoolBangumi**
- **NhkForSchoolProgramList**
- **NhkForSchoolSubject**: Portal page for each school subjects, like Japanese (kokugo, 国語) or math (sansuu/suugaku or 算数・数学)
@@ -808,7 +769,7 @@
- **nickelodeon:br**
- **nickelodeonru**
- **nicknight**
- **niconico**: [<abbr title="netrc machine"><em>niconico</em></abbr>] ニコニコ動画
- **niconico**: ニコニコ動画
- **niconico:history**: NicoNico user history. Requires cookies.
- **niconico:playlist**
- **niconico:series**
@@ -821,7 +782,7 @@
- **Nitter**
- **njoy**: N-JOY
- **njoy:embed**
- **NJPWWorld**: [<abbr title="netrc machine"><em>njpwworld</em></abbr>] 新日本プロレスワールド
- **NJPWWorld**: 新日本プロレスワールド
- **NobelPrize**
- **NonkTube**
- **NoodleMagazine**
@@ -894,17 +855,14 @@
- **orf:tvthek**: ORF TVthek
- **orf:vorarlberg**: Radio Vorarlberg
- **orf:wien**: Radio Wien
- **OsnatelTV**: [<abbr title="netrc machine"><em>osnateltv</em></abbr>]
- **OsnatelTV**
- **OutsideTV**
- **PacktPub**: [<abbr title="netrc machine"><em>packtpub</em></abbr>]
- **PacktPub**
- **PacktPubCourse**
- **PalcoMP3:artist**
- **PalcoMP3:song**
- **PalcoMP3:video**
- **pandora.tv**: 판도라TV
- **Panopto**
- **PanoptoList**
- **PanoptoPlaylist**
- **ParamountNetwork**
- **ParamountPlus**
- **ParamountPlusSeries**
@@ -918,7 +876,7 @@
- **peer.tv**
- **PeerTube**
- **PeerTube:Playlist**
- **peloton**: [<abbr title="netrc machine"><em>peloton</em></abbr>]
- **peloton**
- **peloton:live**: Peloton Live
- **People**
- **PerformGroup**
@@ -927,7 +885,7 @@
- **PhilharmonieDeParis**: Philharmonie de Paris
- **phoenix.de**
- **Photobucket**
- **Piapro**: [<abbr title="netrc machine"><em>piapro</em></abbr>]
- **Piapro**
- **Picarto**
- **PicartoVod**
- **Piksel**
@@ -938,27 +896,25 @@
- **pixiv:sketch:user**
- **Pladform**
- **PlanetMarathi**
- **Platzi**: [<abbr title="netrc machine"><em>platzi</em></abbr>]
- **PlatziCourse**: [<abbr title="netrc machine"><em>platzi</em></abbr>]
- **Platzi**
- **PlatziCourse**
- **play.fm**
- **player.sky.it**
- **PlayPlusTV**: [<abbr title="netrc machine"><em>playplustv</em></abbr>]
- **PlayPlusTV**
- **PlayStuff**
- **PlaysTV**
- **PlaySuisse**
- **Playtvak**: Playtvak.cz, iDNES.cz and Lidovky.cz
- **Playvid**
- **PlayVids**
- **Playwire**
- **pluralsight**: [<abbr title="netrc machine"><em>pluralsight</em></abbr>]
- **pluralsight**
- **pluralsight:course**
- **PlutoTV**
- **Podchaser**
- **podomatic**
- **Pokemon**
- **PokemonWatch**
- **PokerGo**: [<abbr title="netrc machine"><em>pokergo</em></abbr>]
- **PokerGoCollection**: [<abbr title="netrc machine"><em>pokergo</em></abbr>]
- **PokerGo**
- **PokerGoCollection**
- **PolsatGo**
- **PolskieRadio**
- **polskieradio:kierowcow**
@@ -973,11 +929,11 @@
- **Pornez**
- **PornFlip**
- **PornHd**
- **PornHub**: [<abbr title="netrc machine"><em>pornhub</em></abbr>] PornHub and Thumbzilla
- **PornHubPagedVideoList**: [<abbr title="netrc machine"><em>pornhub</em></abbr>]
- **PornHubPlaylist**: [<abbr title="netrc machine"><em>pornhub</em></abbr>]
- **PornHubUser**: [<abbr title="netrc machine"><em>pornhub</em></abbr>]
- **PornHubUserVideosUpload**: [<abbr title="netrc machine"><em>pornhub</em></abbr>]
- **PornHub**: PornHub and Thumbzilla
- **PornHubPagedVideoList**
- **PornHubPlaylist**
- **PornHubUser**
- **PornHubUserVideosUpload**
- **Pornotube**
- **PornoVoisines**
- **PornoXO**
@@ -999,8 +955,10 @@
- **qqmusic:playlist**: QQ音乐 - 歌单
- **qqmusic:singer**: QQ音乐 - 歌手
- **qqmusic:toplist**: QQ音乐 - 排行榜
- **QuantumTV**: [<abbr title="netrc machine"><em>quantumtv</em></abbr>]
- **QuantumTV**
- **Qub**
- **Quickline**
- **QuicklineLive**
- **R7**
- **R7Article**
- **Radiko**
@@ -1052,12 +1010,11 @@
- **RICE**
- **RMCDecouverte**
- **RockstarGames**
- **Rokfin**: [<abbr title="netrc machine"><em>rokfin</em></abbr>]
- **rokfin:channel**: Rokfin Channels
- **rokfin:search**: Rokfin Search; "rkfnsearch:" prefix
- **rokfin:stack**: Rokfin Stacks
- **RoosterTeeth**: [<abbr title="netrc machine"><em>roosterteeth</em></abbr>]
- **RoosterTeethSeries**: [<abbr title="netrc machine"><em>roosterteeth</em></abbr>]
- **Rokfin**
- **rokfin:channel**
- **rokfin:stack**
- **RoosterTeeth**
- **RoosterTeethSeries**
- **RottenTomatoes**
- **Rozhlas**
- **RTBF**
@@ -1096,12 +1053,12 @@
- **Ruutu**
- **Ruv**
- **ruv.is:spila**
- **safari**: [<abbr title="netrc machine"><em>safari</em></abbr>] safaribooksonline.com online video
- **safari:api**: [<abbr title="netrc machine"><em>safari</em></abbr>]
- **safari:course**: [<abbr title="netrc machine"><em>safari</em></abbr>] safaribooksonline.com online courses
- **safari**: safaribooksonline.com online video
- **safari:api**
- **safari:course**: safaribooksonline.com online courses
- **Saitosan**
- **SAKTV**: [<abbr title="netrc machine"><em>saktv</em></abbr>]
- **SaltTV**: [<abbr title="netrc machine"><em>salttv</em></abbr>]
- **SAKTV**
- **SaltTV**
- **SampleFocus**
- **Sapo**: SAPO Vídeos
- **savefrom.net**
@@ -1113,8 +1070,8 @@
- **ScreencastOMatic**
- **ScrippsNetworks**
- **scrippsnetworks:watch**
- **SCTE**: [<abbr title="netrc machine"><em>scte</em></abbr>]
- **SCTECourse**: [<abbr title="netrc machine"><em>scte</em></abbr>]
- **SCTE**
- **SCTECourse**
- **Seeker**
- **SenateGov**
- **SenateISVP**
@@ -1123,7 +1080,7 @@
- **Sexu**
- **SeznamZpravy**
- **SeznamZpravyArticle**
- **Shahid**: [<abbr title="netrc machine"><em>shahid</em></abbr>]
- **Shahid**
- **ShahidShow**
- **Shared**: shared.sx
- **ShemarooMe**
@@ -1148,22 +1105,21 @@
- **Slutload**
- **Snotr**
- **Sohu**
- **SonyLIV**: [<abbr title="netrc machine"><em>sonyliv</em></abbr>]
- **SonyLIV**
- **SonyLIVSeries**
- **soundcloud**: [<abbr title="netrc machine"><em>soundcloud</em></abbr>]
- **soundcloud:playlist**: [<abbr title="netrc machine"><em>soundcloud</em></abbr>]
- **soundcloud:related**: [<abbr title="netrc machine"><em>soundcloud</em></abbr>]
- **soundcloud:search**: [<abbr title="netrc machine"><em>soundcloud</em></abbr>] Soundcloud search; "scsearch:" prefix
- **soundcloud:set**: [<abbr title="netrc machine"><em>soundcloud</em></abbr>]
- **soundcloud:trackstation**: [<abbr title="netrc machine"><em>soundcloud</em></abbr>]
- **soundcloud:user**: [<abbr title="netrc machine"><em>soundcloud</em></abbr>]
- **soundcloud**
- **soundcloud:playlist**
- **soundcloud:related**
- **soundcloud:search**: Soundcloud search; "scsearch:" prefix
- **soundcloud:set**
- **soundcloud:trackstation**
- **soundcloud:user**
- **SoundcloudEmbed**
- **soundgasm**
- **soundgasm:profile**
- **southpark.cc.com**
- **southpark.cc.com:español**
- **southpark.de**
- **southpark.lat**
- **southpark.nl**
- **southparkstudios.dk**
- **SovietsCloset**
@@ -1175,8 +1131,8 @@
- **Sport5**
- **SportBox**
- **SportDeutschland**
- **spotify**: Spotify episodes
- **spotify:show**: Spotify shows
- **spotify**
- **spotify:show**
- **Spreaker**
- **SpreakerPage**
- **SpreakerShow**
@@ -1203,7 +1159,6 @@
- **StretchInternet**
- **Stripchat**
- **stv:player**
- **Substack**
- **SunPorno**
- **sverigesradio:episode**
- **sverigesradio:publication**
@@ -1219,13 +1174,13 @@
- **Tass**
- **TBS**
- **TDSLifeway**
- **Teachable**: [<abbr title="netrc machine"><em>teachable</em></abbr>]
- **TeachableCourse**: [<abbr title="netrc machine"><em>teachable</em></abbr>]
- **Teachable**
- **TeachableCourse**
- **teachertube**: teachertube.com videos
- **teachertube:user:collection**: teachertube.com user and collection videos
- **TeachingChannel**
- **Teamcoco**
- **TeamTreeHouse**: [<abbr title="netrc machine"><em>teamtreehouse</em></abbr>]
- **TeamTreeHouse**
- **TechTalks**
- **techtv.mit.edu**
- **TedEmbed**
@@ -1247,8 +1202,8 @@
- **TeleQuebecVideo**
- **TeleTask**
- **Telewebion**
- **TennisTV**: [<abbr title="netrc machine"><em>tennistv</em></abbr>]
- **TenPlay**: [<abbr title="netrc machine"><em>10play</em></abbr>]
- **TennisTV**
- **TenPlay**
- **TF1**
- **TFO**
- **TheIntercept**
@@ -1279,10 +1234,10 @@
- **Tokentube**
- **Tokentube:channel**
- **ToonGoggles**
- **tou.tv**: [<abbr title="netrc machine"><em>toutv</em></abbr>]
- **tou.tv**
- **Toypics**: Toypics video
- **ToypicsUser**: Toypics user profile
- **TrailerAddict**: (**Currently broken**)
- **TrailerAddict** (Currently broken)
- **TravelChannel**
- **Trilulilu**
- **Trovo**
@@ -1293,9 +1248,9 @@
- **TruNews**
- **TruTV**
- **Tube8**
- **TubiTv**: [<abbr title="netrc machine"><em>tubitv</em></abbr>]
- **TubiTv**
- **TubiTvShow**
- **Tumblr**: [<abbr title="netrc machine"><em>tumblr</em></abbr>]
- **Tumblr**
- **tunein:clip**
- **tunein:program**
- **tunein:station**
@@ -1343,23 +1298,23 @@
- **TwitCasting**
- **TwitCastingLive**
- **TwitCastingUser**
- **twitch:clips**: [<abbr title="netrc machine"><em>twitch</em></abbr>]
- **twitch:stream**: [<abbr title="netrc machine"><em>twitch</em></abbr>]
- **twitch:vod**: [<abbr title="netrc machine"><em>twitch</em></abbr>]
- **TwitchCollection**: [<abbr title="netrc machine"><em>twitch</em></abbr>]
- **TwitchVideos**: [<abbr title="netrc machine"><em>twitch</em></abbr>]
- **TwitchVideosClips**: [<abbr title="netrc machine"><em>twitch</em></abbr>]
- **TwitchVideosCollections**: [<abbr title="netrc machine"><em>twitch</em></abbr>]
- **twitch:clips**
- **twitch:stream**
- **twitch:vod**
- **TwitchCollection**
- **TwitchVideos**
- **TwitchVideosClips**
- **TwitchVideosCollections**
- **twitter**
- **twitter:amplify**
- **twitter:broadcast**
- **twitter:card**
- **twitter:shortener**
- **udemy**: [<abbr title="netrc machine"><em>udemy</em></abbr>]
- **udemy:course**: [<abbr title="netrc machine"><em>udemy</em></abbr>]
- **udemy**
- **udemy:course**
- **UDNEmbed**: 聯合影音
- **UFCArabia**: [<abbr title="netrc machine"><em>ufcarabia</em></abbr>]
- **UFCTV**: [<abbr title="netrc machine"><em>ufctv</em></abbr>]
- **UFCArabia**
- **UFCTV**
- **ukcolumn**
- **UKTVPlay**
- **umg:de**: Universal Music Deutschland
@@ -1387,7 +1342,7 @@
- **VevoPlaylist**
- **VGTV**: VGTV, BTTV, FTV, Aftenposten and Aftonbladet
- **vh1.com**
- **vhx:embed**: [<abbr title="netrc machine"><em>vimeo</em></abbr>]
- **vhx:embed**
- **Viafree**
- **vice**
- **vice:article**
@@ -1399,49 +1354,50 @@
- **video.google:search**: Google Video search; "gvsearch:" prefix
- **video.sky.it**
- **video.sky.it:live**
- **VideocampusSachsen**
- **VideocampusSachsenEmbed**
- **VideoDetective**
- **videofy.me**
- **videomore**
- **videomore:season**
- **videomore:video**
- **VideoPress**
- **Vidio**: [<abbr title="netrc machine"><em>vidio</em></abbr>]
- **VidioLive**: [<abbr title="netrc machine"><em>vidio</em></abbr>]
- **VidioPremier**: [<abbr title="netrc machine"><em>vidio</em></abbr>]
- **Vidio**
- **VidioLive**
- **VidioPremier**
- **VidLii**
- **vier**: [<abbr title="netrc machine"><em>vier</em></abbr>] vier.be and vijf.be
- **vier**: vier.be and vijf.be
- **vier:videos**
- **viewlift**
- **viewlift:embed**
- **Viidea**
- **viki**: [<abbr title="netrc machine"><em>viki</em></abbr>]
- **viki:channel**: [<abbr title="netrc machine"><em>viki</em></abbr>]
- **vimeo**: [<abbr title="netrc machine"><em>vimeo</em></abbr>]
- **vimeo:album**: [<abbr title="netrc machine"><em>vimeo</em></abbr>]
- **vimeo:channel**: [<abbr title="netrc machine"><em>vimeo</em></abbr>]
- **vimeo:group**: [<abbr title="netrc machine"><em>vimeo</em></abbr>]
- **vimeo:likes**: [<abbr title="netrc machine"><em>vimeo</em></abbr>] Vimeo user likes
- **vimeo:ondemand**: [<abbr title="netrc machine"><em>vimeo</em></abbr>]
- **vimeo:review**: [<abbr title="netrc machine"><em>vimeo</em></abbr>] Review pages on vimeo
- **vimeo:user**: [<abbr title="netrc machine"><em>vimeo</em></abbr>]
- **vimeo:watchlater**: [<abbr title="netrc machine"><em>vimeo</em></abbr>] Vimeo watch later list, ":vimeowatchlater" keyword (requires authentication)
- **viki**
- **viki:channel**
- **vimeo**
- **vimeo:album**
- **vimeo:channel**
- **vimeo:group**
- **vimeo:likes**: Vimeo user likes
- **vimeo:ondemand**
- **vimeo:review**: Review pages on vimeo
- **vimeo:user**
- **vimeo:watchlater**: Vimeo watch later list, "vimeowatchlater" keyword (requires authentication)
- **Vimm:recording**
- **Vimm:stream**
- **Vimp**
- **Vimple**: Vimple - one-click video hosting
- **Vine**
- **vine:user**
- **Viqeo**
- **Viu**
- **viu:ott**: [<abbr title="netrc machine"><em>viu</em></abbr>]
- **viu:ott**
- **viu:playlist**
- **Vivo**: vivo.sx
- **vk**: [<abbr title="netrc machine"><em>vk</em></abbr>] VK
- **vk:uservideos**: [<abbr title="netrc machine"><em>vk</em></abbr>] VK - User's Videos
- **vk:wallpost**: [<abbr title="netrc machine"><em>vk</em></abbr>]
- **vlive**: [<abbr title="netrc machine"><em>vlive</em></abbr>]
- **vlive:channel**: [<abbr title="netrc machine"><em>vlive</em></abbr>]
- **vlive:post**: [<abbr title="netrc machine"><em>vlive</em></abbr>]
- **vk**: VK
- **vk:uservideos**: VK - User's Videos
- **vk:wallpost**
- **vlive**
- **vlive:channel**
- **vlive:post**
- **vm.tiktok**
- **Vodlocker**
- **VODPl**
@@ -1456,12 +1412,12 @@
- **vpro**: npo.nl, ntr.nl, omroepwnl.nl, zapp.nl and npo3.nl
- **Vrak**
- **VRT**: VRT NWS, Flanders News, Flandern Info and Sporza
- **VrtNU**: [<abbr title="netrc machine"><em>vrtnu</em></abbr>] VrtNU.be
- **vrv**: [<abbr title="netrc machine"><em>vrv</em></abbr>]
- **VrtNU**: VrtNU.be
- **vrv**
- **vrv:series**
- **VShare**
- **VTM**
- **VTXTV**: [<abbr title="netrc machine"><em>vtxtv</em></abbr>]
- **VTXTV**
- **VuClip**
- **Vupload**
- **VVVVID**
@@ -1470,18 +1426,14 @@
- **Vzaar**
- **Wakanim**
- **Walla**
- **WalyTV**: [<abbr title="netrc machine"><em>walytv</em></abbr>]
- **wasdtv:clip**
- **wasdtv:record**
- **wasdtv:stream**
- **WalyTV**
- **washingtonpost**
- **washingtonpost:article**
- **wat.tv**
- **WatchBox**
- **WatchESPN**
- **WatchIndianPorn**: Watch Indian Porn
- **WDR**
- **wdr:mobile**: (**Currently broken**)
- **wdr:mobile** (Currently broken)
- **WDRElefant**
- **WDRPage**
- **web.archive:youtube**: web.archive.org saved youtube videos, "ytarchive:" prefix
@@ -1551,17 +1503,14 @@
- **YourPorn**
- **YourUpload**
- **youtube**: YouTube
- **youtube:clip**
- **youtube:favorites**: YouTube liked videos; ":ytfav" keyword (requires cookies)
- **youtube:history**: Youtube watch history; ":ythis" keyword (requires cookies)
- **youtube:music:search_url**: YouTube music search URLs with selectable sections (Eg: #songs)
- **youtube:notif**: YouTube notifications; ":ytnotif" keyword (requires cookies)
- **youtube:playlist**: YouTube playlists
- **youtube:recommended**: YouTube recommended videos; ":ytrec" keyword
- **youtube:search**: YouTube search; "ytsearch:" prefix
- **youtube:search:date**: YouTube search, newest videos first; "ytsearchdate:" prefix
- **youtube:search_url**: YouTube search URLs with sorting and filter support
- **youtube:stories**: YouTube channel stories; "ytstories:" prefix
- **youtube:subscriptions**: YouTube subscriptions feed; ":ytsubs" keyword (requires cookies)
- **youtube:tab**: YouTube Tabs
- **youtube:user**: YouTube user videos; "ytuser:" prefix
@@ -1569,23 +1518,16 @@
- **YoutubeLivestreamEmbed**: YouTube livestream embeds
- **YoutubeYtBe**: youtu.be
- **Zapiks**
- **Zattoo**: [<abbr title="netrc machine"><em>zattoo</em></abbr>]
- **ZattooLive**: [<abbr title="netrc machine"><em>zattoo</em></abbr>]
- **ZattooMovies**: [<abbr title="netrc machine"><em>zattoo</em></abbr>]
- **ZattooRecordings**: [<abbr title="netrc machine"><em>zattoo</em></abbr>]
- **Zattoo**
- **ZattooLive**
- **ZDF**
- **ZDFChannel**
- **Zee5**: [<abbr title="netrc machine"><em>zee5</em></abbr>]
- **Zee5**
- **zee5:series**
- **ZenYandex**
- **ZenYandexChannel**
- **Zhihu**
- **zingmp3**: zingmp3.vn
- **zingmp3:album**
- **zingmp3:chart-home**
- **zingmp3:chart-music-video**
- **zingmp3:user**
- **zingmp3:week-chart**
- **zoom**
- **Zype**
- **generic**: Generic downloader that works on some sites

test/helper.py
View File

@@ -1,16 +1,26 @@
from __future__ import unicode_literals
import errno
import io
import hashlib
import json
import os.path
import re
import types
import ssl
import sys
import types
import yt_dlp.extractor
from yt_dlp import YoutubeDL
from yt_dlp.compat import compat_os_name, compat_str
from yt_dlp.utils import preferredencoding, write_string
from yt_dlp.compat import (
compat_os_name,
compat_str,
)
from yt_dlp.utils import (
preferredencoding,
write_string,
)
if 'pytest' in sys.modules:
import pytest
@@ -25,10 +35,10 @@ def get_params(override=None):
'parameters.json')
LOCAL_PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'local_parameters.json')
with open(PARAMETERS_FILE, encoding='utf-8') as pf:
with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
parameters = json.load(pf)
if os.path.exists(LOCAL_PARAMETERS_FILE):
with open(LOCAL_PARAMETERS_FILE, encoding='utf-8') as pf:
with io.open(LOCAL_PARAMETERS_FILE, encoding='utf-8') as pf:
parameters.update(json.load(pf))
if override:
parameters.update(override)
@@ -44,7 +54,7 @@ def try_rm(filename):
raise
def report_warning(message, *args, **kwargs):
def report_warning(message):
'''
Print the message to stderr, it will be prefixed with 'WARNING:'
If stderr is a tty file the 'WARNING:' will be colored
@@ -53,8 +63,8 @@ def report_warning(message, *args, **kwargs):
_msg_header = '\033[0;33mWARNING:\033[0m'
else:
_msg_header = 'WARNING:'
output = f'{_msg_header} {message}\n'
if 'b' in getattr(sys.stderr, 'mode', ''):
output = '%s %s\n' % (_msg_header, message)
if 'b' in getattr(sys.stderr, 'mode', '') or sys.version_info[0] < 3:
output = output.encode(preferredencoding())
sys.stderr.write(output)
@@ -64,13 +74,13 @@ class FakeYDL(YoutubeDL):
# Different instances of the downloader can't share the same dictionary
# some test set the "sublang" parameter, which would break the md5 checks.
params = get_params(override=override)
super().__init__(params, auto_init=False)
super(FakeYDL, self).__init__(params, auto_init=False)
self.result = []
def to_screen(self, s, *args, **kwargs):
def to_screen(self, s, skip_eol=None):
print(s)
def trouble(self, s, *args, **kwargs):
def trouble(self, s, tb=None):
raise Exception(s)
def download(self, x):
@@ -80,19 +90,20 @@ class FakeYDL(YoutubeDL):
# Silence an expected warning matching a regex
old_report_warning = self.report_warning
def report_warning(self, message, *args, **kwargs):
def report_warning(self, message):
if re.match(regex, message):
return
old_report_warning(message, *args, **kwargs)
old_report_warning(message)
self.report_warning = types.MethodType(report_warning, self)
def gettestcases(include_onlymatching=False):
for ie in yt_dlp.extractor.gen_extractors():
yield from ie.get_testcases(include_onlymatching)
for tc in ie.get_testcases(include_onlymatching):
yield tc
md5 = lambda s: hashlib.md5(s.encode()).hexdigest()
md5 = lambda s: hashlib.md5(s.encode('utf-8')).hexdigest()
def expect_value(self, got, expected, field):
@@ -102,30 +113,33 @@ def expect_value(self, got, expected, field):
self.assertTrue(
isinstance(got, compat_str),
f'Expected a {compat_str.__name__} object, but got {type(got).__name__} for field {field}')
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, field))
self.assertTrue(
match_rex.match(got),
f'field {field} (value: {got!r}) should match {match_str!r}')
'field %s (value: %r) should match %r' % (field, got, match_str))
elif isinstance(expected, compat_str) and expected.startswith('startswith:'):
start_str = expected[len('startswith:'):]
self.assertTrue(
isinstance(got, compat_str),
f'Expected a {compat_str.__name__} object, but got {type(got).__name__} for field {field}')
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, field))
self.assertTrue(
got.startswith(start_str),
f'field {field} (value: {got!r}) should start with {start_str!r}')
'field %s (value: %r) should start with %r' % (field, got, start_str))
elif isinstance(expected, compat_str) and expected.startswith('contains:'):
contains_str = expected[len('contains:'):]
self.assertTrue(
isinstance(got, compat_str),
f'Expected a {compat_str.__name__} object, but got {type(got).__name__} for field {field}')
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, field))
self.assertTrue(
contains_str in got,
f'field {field} (value: {got!r}) should contain {contains_str!r}')
'field %s (value: %r) should contain %r' % (field, got, contains_str))
elif isinstance(expected, type):
self.assertTrue(
isinstance(got, expected),
f'Expected type {expected!r} for field {field}, but got value {got!r} of type {type(got)!r}')
'Expected type %r for field %s, but got value %r of type %r' % (expected, field, got, type(got)))
elif isinstance(expected, dict) and isinstance(got, dict):
expect_dict(self, got, expected)
elif isinstance(expected, list) and isinstance(got, list):
@@ -145,12 +159,13 @@ def expect_value(self, got, expected, field):
if isinstance(expected, compat_str) and expected.startswith('md5:'):
self.assertTrue(
isinstance(got, compat_str),
f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
'Expected field %s to be a unicode object, but got value %r of type %r' % (field, got, type(got)))
got = 'md5:' + md5(got)
elif isinstance(expected, compat_str) and re.match(r'^(?:min|max)?count:\d+', expected):
self.assertTrue(
isinstance(got, (list, dict)),
f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
'Expected field %s to be a list or a dict, but it is of type %s' % (
field, type(got).__name__))
op, _, expected_num = expected.partition(':')
expected_num = int(expected_num)
if op == 'mincount':
@@ -170,7 +185,7 @@ def expect_value(self, got, expected, field):
return
self.assertEqual(
expected, got,
f'Invalid value for field {field}, expected {expected!r}, got {got!r}')
'Invalid value for field %s, expected %r, got %r' % (field, expected, got))
def expect_dict(self, got_dict, expected_dict):
@@ -181,7 +196,15 @@ def expect_dict(self, got_dict, expected_dict):
def sanitize_got_info_dict(got_dict):
IGNORED_FIELDS = (
*YoutubeDL._format_fields,
# Format keys
'url', 'manifest_url', 'format', 'format_id', 'format_note', 'width', 'height', 'resolution',
'dynamic_range', 'tbr', 'abr', 'acodec', 'asr', 'vbr', 'fps', 'vcodec', 'container', 'filesize',
'filesize_approx', 'player_url', 'protocol', 'fragment_base_url', 'fragments', 'preference',
'language', 'language_preference', 'quality', 'source_preference', 'http_headers',
'stretched_ratio', 'no_resume', 'has_drm', 'downloader_options',
# RTMP formats
'page_url', 'app', 'play_path', 'tc_url', 'flash_version', 'rtmp_live', 'rtmp_conn', 'rtmp_protocol', 'rtmp_real_time',
# Lists
'formats', 'thumbnails', 'subtitles', 'automatic_captions', 'comments', 'entries',
@@ -245,13 +268,13 @@ def expect_info_dict(self, got_dict, expected_dict):
info_dict_str = ''
if len(missing_keys) != len(expected_dict):
info_dict_str += ''.join(
f' {_repr(k)}: {_repr(v)},\n'
' %s: %s,\n' % (_repr(k), _repr(v))
for k, v in test_info_dict.items() if k not in missing_keys)
if info_dict_str:
info_dict_str += '\n'
info_dict_str += ''.join(
f' {_repr(k)}: {_repr(test_info_dict[k])},\n'
' %s: %s,\n' % (_repr(k), _repr(test_info_dict[k]))
for k in missing_keys)
write_string(
'\n\'info_dict\': {\n' + info_dict_str + '},\n', out=sys.stderr)
@@ -280,30 +303,30 @@ def assertRegexpMatches(self, text, regexp, msg=None):
def assertGreaterEqual(self, got, expected, msg=None):
if not (got >= expected):
if msg is None:
msg = f'{got!r} not greater than or equal to {expected!r}'
msg = '%r not greater than or equal to %r' % (got, expected)
self.assertTrue(got >= expected, msg)
def assertLessEqual(self, got, expected, msg=None):
if not (got <= expected):
if msg is None:
msg = f'{got!r} not less than or equal to {expected!r}'
msg = '%r not less than or equal to %r' % (got, expected)
self.assertTrue(got <= expected, msg)
def assertEqual(self, got, expected, msg=None):
if not (got == expected):
if msg is None:
msg = f'{got!r} not equal to {expected!r}'
msg = '%r not equal to %r' % (got, expected)
self.assertTrue(got == expected, msg)
def expect_warnings(ydl, warnings_re):
real_warning = ydl.report_warning
def _report_warning(w, *args, **kwargs):
def _report_warning(w):
if not any(re.search(w_re, w) for w_re in warnings_re):
real_warning(w, *args, **kwargs)
real_warning(w)
ydl.report_warning = _report_warning

1
test/swftests.unused/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
*.swf

test/swftests.unused/ArrayAccess.as
View File

@@ -0,0 +1,19 @@
// input: [["a", "b", "c", "d"]]
// output: ["c", "b", "a", "d"]
package {
public class ArrayAccess {
public static function main(ar:Array):Array {
var aa:ArrayAccess = new ArrayAccess();
return aa.f(ar, 2);
}
private function f(ar:Array, num:Number):Array{
var x:String = ar[0];
var y:String = ar[num % ar.length];
ar[0] = y;
ar[num] = x;
return ar;
}
}
}

test/swftests.unused/ClassCall.as
View File

@@ -0,0 +1,17 @@
// input: []
// output: 121
package {
public class ClassCall {
public static function main():int{
var f:OtherClass = new OtherClass();
return f.func(100,20);
}
}
}
class OtherClass {
public function func(x: int, y: int):int {
return x+y+1;
}
}

test/swftests.unused/ClassConstruction.as
View File

@@ -0,0 +1,15 @@
// input: []
// output: 0
package {
public class ClassConstruction {
public static function main():int{
var f:Foo = new Foo();
return 0;
}
}
}
class Foo {
}

test/swftests.unused/ConstArrayAccess.as
View File

@@ -0,0 +1,18 @@
// input: []
// output: 4
package {
public class ConstArrayAccess {
private static const x:int = 2;
private static const ar:Array = ["42", "3411"];
public static function main():int{
var c:ConstArrayAccess = new ConstArrayAccess();
return c.f();
}
public function f(): int {
return ar[1].length;
}
}
}

test/swftests.unused/ConstantInt.as
View File

@@ -0,0 +1,12 @@
// input: []
// output: 2
package {
public class ConstantInt {
private static const x:int = 2;
public static function main():int{
return x;
}
}
}

test/swftests.unused/DictCall.as
View File

@@ -0,0 +1,10 @@
// input: [{"x": 1, "y": 2}]
// output: 3
package {
public class DictCall {
public static function main(d:Object):int{
return d.x + d.y;
}
}
}

View File

@@ -0,0 +1,10 @@
// input: []
// output: false
package {
public class EqualsOperator {
public static function main():Boolean{
return 1 == 2;
}
}
}

test/swftests.unused/LocalVars.as
View File

@@ -0,0 +1,13 @@
// input: [1, 2]
// output: 3
package {
public class LocalVars {
public static function main(a:int, b:int):int{
var c:int = a + b + b;
var d:int = c - b;
var e:int = d;
return e;
}
}
}

test/swftests.unused/MemberAssignment.as
View File

@@ -0,0 +1,22 @@
// input: [1]
// output: 2
package {
public class MemberAssignment {
public var v:int;
public function g():int {
return this.v;
}
public function f(a:int):int{
this.v = a;
return this.v + this.g();
}
public static function main(a:int): int {
var v:MemberAssignment = new MemberAssignment();
return v.f(a);
}
}
}

test/swftests.unused/NeOperator.as
View File

@@ -0,0 +1,24 @@
// input: []
// output: 123
package {
public class NeOperator {
public static function main(): int {
var res:int = 0;
if (1 != 2) {
res += 3;
} else {
res += 4;
}
if (2 != 2) {
res += 10;
} else {
res += 20;
}
if (9 == 9) {
res += 100;
}
return res;
}
}
}

test/swftests.unused/PrivateCall.as
View File

@@ -0,0 +1,21 @@
// input: []
// output: 9
package {
public class PrivateCall {
public static function main():int{
var f:OtherClass = new OtherClass();
return f.func();
}
}
}
class OtherClass {
private function pf():int {
return 9;
}
public function func():int {
return this.pf();
}
}

test/swftests.unused/PrivateVoidCall.as
View File

@@ -0,0 +1,22 @@
// input: []
// output: 9
package {
public class PrivateVoidCall {
public static function main():int{
var f:OtherClass = new OtherClass();
f.func();
return 9;
}
}
}
class OtherClass {
private function pf():void {
;
}
public function func():void {
this.pf();
}
}

test/swftests.unused/StaticAssignment.as
View File

@@ -0,0 +1,13 @@
// input: [1]
// output: 1
package {
public class StaticAssignment {
public static var v:int;
public static function main(a:int):int{
v = a;
return v;
}
}
}

test/swftests.unused/StaticRetrieval.as
View File

@@ -0,0 +1,16 @@
// input: []
// output: 1
package {
public class StaticRetrieval {
public static var v:int;
public static function main():int{
if (v) {
return 0;
} else {
return 1;
}
}
}
}

test/swftests.unused/StringBasics.as
View File

@@ -0,0 +1,11 @@
// input: []
// output: 3
package {
public class StringBasics {
public static function main():int{
var s:String = "abc";
return s.length;
}
}
}

test/swftests.unused/StringCharCodeAt.as
View File

@@ -0,0 +1,11 @@
// input: []
// output: 9897
package {
public class StringCharCodeAt {
public static function main():int{
var s:String = "abc";
return s.charCodeAt(1) * 100 + s.charCodeAt();
}
}
}

test/swftests.unused/StringConversion.as
View File

@@ -0,0 +1,11 @@
// input: []
// output: 2
package {
public class StringConversion {
public static function main():int{
var s:String = String(99);
return s.length;
}
}
}

test/test_InfoExtractor.py
View File

@@ -1,23 +1,21 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
# Allow direct execution
import io
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import threading
from test.helper import FakeYDL, expect_dict, expect_value, http_server_port
from yt_dlp.compat import compat_etree_fromstring, compat_http_server
from yt_dlp.extractor import YoutubeIE, get_info_extractor
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.utils import (
ExtractorError,
RegexNotFoundError,
encode_data_uri,
strip_jsonp,
)
from yt_dlp.extractor import YoutubeIE, get_info_extractor
from yt_dlp.utils import encode_data_uri, strip_jsonp, ExtractorError, RegexNotFoundError
import threading
TEAPOT_RESPONSE_STATUS = 418
TEAPOT_RESPONSE_BODY = "<h1>418 I'm a teapot</h1>"
@@ -502,24 +500,6 @@ class TestInfoExtractor(unittest.TestCase):
}],
})
# from https://0000.studio/
# with type attribute but without extension in URL
expect_dict(
self,
self.ie._parse_html5_media_entries(
'https://0000.studio',
r'''
<video src="https://d1ggyt9m8pwf3g.cloudfront.net/protected/ap-northeast-1:1864af40-28d5-492b-b739-b32314b1a527/archive/clip/838db6a7-8973-4cd6-840d-8517e4093c92"
controls="controls" type="video/mp4" preload="metadata" autoplay="autoplay" playsinline class="object-contain">
</video>
''', None)[0],
{
'formats': [{
'url': 'https://d1ggyt9m8pwf3g.cloudfront.net/protected/ap-northeast-1:1864af40-28d5-492b-b739-b32314b1a527/archive/clip/838db6a7-8973-4cd6-840d-8517e4093c92',
'ext': 'mp4',
}],
})
def test_extract_jwplayer_data_realworld(self):
# from http://www.suffolk.edu/sjc/
expect_dict(
@@ -1031,7 +1011,8 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
]
for m3u8_file, m3u8_url, expected_formats, expected_subs in _TEST_CASES:
with open('./test/testdata/m3u8/%s.m3u8' % m3u8_file, encoding='utf-8') as f:
with io.open('./test/testdata/m3u8/%s.m3u8' % m3u8_file,
mode='r', encoding='utf-8') as f:
formats, subs = self.ie._parse_m3u8_formats_and_subtitles(
f.read(), m3u8_url, ext='mp4')
self.ie._sort_formats(formats)
@@ -1376,9 +1357,10 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
]
for mpd_file, mpd_url, mpd_base_url, expected_formats, expected_subtitles in _TEST_CASES:
with open('./test/testdata/mpd/%s.mpd' % mpd_file, encoding='utf-8') as f:
with io.open('./test/testdata/mpd/%s.mpd' % mpd_file,
mode='r', encoding='utf-8') as f:
formats, subtitles = self.ie._parse_mpd_formats_and_subtitles(
compat_etree_fromstring(f.read().encode()),
compat_etree_fromstring(f.read().encode('utf-8')),
mpd_base_url=mpd_base_url, mpd_url=mpd_url)
self.ie._sort_formats(formats)
expect_value(self, formats, expected_formats, None)
@@ -1567,9 +1549,10 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
]
for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
with open('./test/testdata/ism/%s.Manifest' % ism_file, encoding='utf-8') as f:
with io.open('./test/testdata/ism/%s.Manifest' % ism_file,
mode='r', encoding='utf-8') as f:
formats, subtitles = self.ie._parse_ism_formats_and_subtitles(
compat_etree_fromstring(f.read().encode()), ism_url=ism_url)
compat_etree_fromstring(f.read().encode('utf-8')), ism_url=ism_url)
self.ie._sort_formats(formats)
expect_value(self, formats, expected_formats, None)
expect_value(self, subtitles, expected_subtitles, None)
@@ -1593,9 +1576,10 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
]
for f4m_file, f4m_url, expected_formats in _TEST_CASES:
with open('./test/testdata/f4m/%s.f4m' % f4m_file, encoding='utf-8') as f:
with io.open('./test/testdata/f4m/%s.f4m' % f4m_file,
mode='r', encoding='utf-8') as f:
formats = self.ie._parse_f4m_formats(
compat_etree_fromstring(f.read().encode()),
compat_etree_fromstring(f.read().encode('utf-8')),
f4m_url, None)
self.ie._sort_formats(formats)
expect_value(self, formats, expected_formats, None)
@@ -1640,9 +1624,10 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
]
for xspf_file, xspf_url, expected_entries in _TEST_CASES:
with open('./test/testdata/xspf/%s.xspf' % xspf_file, encoding='utf-8') as f:
with io.open('./test/testdata/xspf/%s.xspf' % xspf_file,
mode='r', encoding='utf-8') as f:
entries = self.ie._parse_xspf(
compat_etree_fromstring(f.read().encode()),
compat_etree_fromstring(f.read().encode('utf-8')),
xspf_file, xspf_url=xspf_url, xspf_base_url=xspf_url)
expect_value(self, entries, expected_entries, None)
for i in range(len(entries)):

test/test_YoutubeDL.py
View File

@@ -1,46 +1,38 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import copy
import json
from test.helper import FakeYDL, assertRegexpMatches
from yt_dlp import YoutubeDL
from yt_dlp.compat import (
compat_os_name,
compat_setenv,
compat_str,
compat_urllib_error,
)
from yt_dlp.compat import compat_os_name, compat_setenv, compat_str, compat_urllib_error
from yt_dlp.extractor import YoutubeIE
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.postprocessor.common import PostProcessor
from yt_dlp.utils import (
ExtractorError,
LazyList,
OnDemandPagedList,
int_or_none,
match_filter_func,
)
from yt_dlp.utils import ExtractorError, int_or_none, match_filter_func, LazyList
TEST_URL = 'http://localhost/sample.mp4'
class YDL(FakeYDL):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
super(YDL, self).__init__(*args, **kwargs)
self.downloaded_info_dicts = []
self.msgs = []
def process_info(self, info_dict):
self.downloaded_info_dicts.append(info_dict.copy())
def to_screen(self, msg, *args, **kwargs):
def to_screen(self, msg):
self.msgs.append(msg)
def dl(self, *args, **kwargs):
@@ -559,11 +551,11 @@ class TestYoutubeDL(unittest.TestCase):
def s_formats(lang, autocaption=False):
return [{
'ext': ext,
'url': f'http://localhost/video.{lang}.{ext}',
'url': 'http://localhost/video.%s.%s' % (lang, ext),
'_auto': autocaption,
} for ext in ['vtt', 'srt', 'ass']]
subtitles = {l: s_formats(l) for l in ['en', 'fr', 'es']}
auto_captions = {l: s_formats(l, True) for l in ['it', 'pt', 'es']}
subtitles = dict((l, s_formats(l)) for l in ['en', 'fr', 'es'])
auto_captions = dict((l, s_formats(l, True)) for l in ['it', 'pt', 'es'])
info_dict = {
'id': 'test',
'title': 'Test',
@@ -588,7 +580,7 @@ class TestYoutubeDL(unittest.TestCase):
result = get_info({'writesubtitles': True})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'en'})
self.assertEqual(set(subs.keys()), set(['en']))
self.assertTrue(subs['en'].get('data') is None)
self.assertEqual(subs['en']['ext'], 'ass')
@@ -599,39 +591,39 @@ class TestYoutubeDL(unittest.TestCase):
result = get_info({'writesubtitles': True, 'subtitleslangs': ['es', 'fr', 'it']})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'es', 'fr'})
self.assertEqual(set(subs.keys()), set(['es', 'fr']))
result = get_info({'writesubtitles': True, 'subtitleslangs': ['all', '-en']})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'es', 'fr'})
self.assertEqual(set(subs.keys()), set(['es', 'fr']))
result = get_info({'writesubtitles': True, 'subtitleslangs': ['en', 'fr', '-en']})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'fr'})
self.assertEqual(set(subs.keys()), set(['fr']))
result = get_info({'writesubtitles': True, 'subtitleslangs': ['-en', 'en']})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'en'})
self.assertEqual(set(subs.keys()), set(['en']))
result = get_info({'writesubtitles': True, 'subtitleslangs': ['e.+']})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'es', 'en'})
self.assertEqual(set(subs.keys()), set(['es', 'en']))
result = get_info({'writesubtitles': True, 'writeautomaticsub': True, 'subtitleslangs': ['es', 'pt']})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'es', 'pt'})
self.assertEqual(set(subs.keys()), set(['es', 'pt']))
self.assertFalse(subs['es']['_auto'])
self.assertTrue(subs['pt']['_auto'])
result = get_info({'writeautomaticsub': True, 'subtitleslangs': ['es', 'pt']})
subs = result['requested_subtitles']
self.assertTrue(subs)
self.assertEqual(set(subs.keys()), {'es', 'pt'})
self.assertEqual(set(subs.keys()), set(['es', 'pt']))
self.assertTrue(subs['es']['_auto'])
self.assertTrue(subs['pt']['_auto'])
@@ -662,7 +654,7 @@ class TestYoutubeDL(unittest.TestCase):
'duration': 100000,
'playlist_index': 1,
'playlist_autonumber': 2,
'__last_playlist_index': 100,
'_last_playlist_index': 100,
'n_entries': 10,
'formats': [{'id': 'id 1'}, {'id': 'id 2'}, {'id': 'id 3'}]
}
@@ -826,8 +818,6 @@ class TestYoutubeDL(unittest.TestCase):
test('%(id&foo)s.bar', 'foo.bar')
test('%(title&foo)s.bar', 'NA.bar')
test('%(title&foo|baz)s.bar', 'baz.bar')
test('%(x,id&foo|baz)s.bar', 'foo.bar')
test('%(x,title&foo|baz)s.bar', 'baz.bar')
# Laziness
def gen():
@@ -941,7 +931,7 @@ class TestYoutubeDL(unittest.TestCase):
res = get_videos()
self.assertEqual(res, ['1', '2'])
def f(v, incomplete):
def f(v):
if v['id'] == '1':
return None
else:
@@ -990,79 +980,41 @@ class TestYoutubeDL(unittest.TestCase):
self.assertEqual(res, [])
def test_playlist_items_selection(self):
INDICES, PAGE_SIZE = list(range(1, 11)), 3
entries = [{
'id': compat_str(i),
'title': compat_str(i),
'url': TEST_URL,
} for i in range(1, 5)]
playlist = {
'_type': 'playlist',
'id': 'test',
'entries': entries,
'extractor': 'test:playlist',
'extractor_key': 'test:playlist',
'webpage_url': 'http://example.com',
}
def entry(i, evaluated):
evaluated.append(i)
return {
'id': str(i),
'title': str(i),
'url': TEST_URL,
}
def pagedlist_entries(evaluated):
def page_func(n):
start = PAGE_SIZE * n
for i in INDICES[start: start + PAGE_SIZE]:
yield entry(i, evaluated)
return OnDemandPagedList(page_func, PAGE_SIZE)
def page_num(i):
return (i + PAGE_SIZE - 1) // PAGE_SIZE
def generator_entries(evaluated):
for i in INDICES:
yield entry(i, evaluated)
def list_entries(evaluated):
return list(generator_entries(evaluated))
def lazylist_entries(evaluated):
return LazyList(generator_entries(evaluated))
def get_downloaded_info_dicts(params, entries):
def get_downloaded_info_dicts(params):
ydl = YDL(params)
ydl.process_ie_result({
'_type': 'playlist',
'id': 'test',
'extractor': 'test:playlist',
'extractor_key': 'test:playlist',
'webpage_url': 'http://example.com',
'entries': entries,
})
# make a deep copy because the dictionary and nested entries
# can be modified
ydl.process_ie_result(copy.deepcopy(playlist))
return ydl.downloaded_info_dicts
def test_selection(params, expected_ids, evaluate_all=False):
expected_ids = list(expected_ids)
if evaluate_all:
generator_eval = pagedlist_eval = INDICES
elif not expected_ids:
generator_eval = pagedlist_eval = []
else:
generator_eval = INDICES[0: max(expected_ids)]
pagedlist_eval = INDICES[PAGE_SIZE * page_num(min(expected_ids)) - PAGE_SIZE:
PAGE_SIZE * page_num(max(expected_ids))]
def test_selection(params, expected_ids):
results = [
(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
for v in get_downloaded_info_dicts(params)]
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))))
for name, func, expected_eval in (
('list', list_entries, INDICES),
('Generator', generator_entries, generator_eval),
# ('LazyList', lazylist_entries, generator_eval), # Generator and LazyList follow the exact same code path
('PagedList', pagedlist_entries, pagedlist_eval),
):
evaluated = []
entries = func(evaluated)
results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
for v in get_downloaded_info_dicts(params, entries)]
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
test_selection({}, INDICES)
test_selection({'playlistend': 20}, INDICES, True)
test_selection({'playlistend': 2}, INDICES[:2])
test_selection({'playliststart': 11}, [], True)
test_selection({'playliststart': 2}, INDICES[1:])
test_selection({'playlist_items': '2-4'}, INDICES[1:4])
test_selection({}, [1, 2, 3, 4])
test_selection({'playlistend': 10}, [1, 2, 3, 4])
test_selection({'playlistend': 2}, [1, 2])
test_selection({'playliststart': 10}, [])
test_selection({'playliststart': 2}, [2, 3, 4])
test_selection({'playlist_items': '2-4'}, [2, 3, 4])
test_selection({'playlist_items': '2,4'}, [2, 4])
test_selection({'playlist_items': '20'}, [], True)
test_selection({'playlist_items': '10'}, [])
test_selection({'playlist_items': '0'}, [])
# Tests for https://github.com/ytdl-org/youtube-dl/issues/10591
@@ -1071,33 +1023,11 @@ class TestYoutubeDL(unittest.TestCase):
# Tests for https://github.com/yt-dlp/yt-dlp/issues/720
# https://github.com/yt-dlp/yt-dlp/issues/302
test_selection({'playlistreverse': True}, INDICES[::-1])
test_selection({'playliststart': 2, 'playlistreverse': True}, INDICES[:0:-1])
test_selection({'playlistreverse': True}, [4, 3, 2, 1])
test_selection({'playliststart': 2, 'playlistreverse': True}, [4, 3, 2])
test_selection({'playlist_items': '2,4', 'playlistreverse': True}, [4, 2])
test_selection({'playlist_items': '4,2'}, [4, 2])
# Tests for --playlist-items start:end:step
test_selection({'playlist_items': ':'}, INDICES, True)
test_selection({'playlist_items': '::1'}, INDICES, True)
test_selection({'playlist_items': '::-1'}, INDICES[::-1], True)
test_selection({'playlist_items': ':6'}, INDICES[:6])
test_selection({'playlist_items': ':-6'}, INDICES[:-5], True)
test_selection({'playlist_items': '-1:6:-2'}, INDICES[:4:-2], True)
test_selection({'playlist_items': '9:-6:-2'}, INDICES[8:3:-2], True)
test_selection({'playlist_items': '1:inf:2'}, INDICES[::2], True)
test_selection({'playlist_items': '-2:inf'}, INDICES[-2:], True)
test_selection({'playlist_items': ':inf:-1'}, [], True)
test_selection({'playlist_items': '0-2:2'}, [2])
test_selection({'playlist_items': '1-:2'}, INDICES[::2], True)
test_selection({'playlist_items': '0--2:2'}, INDICES[1:-1:2], True)
test_selection({'playlist_items': '10::3'}, [10], True)
test_selection({'playlist_items': '-1::3'}, [10], True)
test_selection({'playlist_items': '11::3'}, [], True)
test_selection({'playlist_items': '-15::2'}, INDICES[1::2], True)
test_selection({'playlist_items': '-15::15'}, [], True)
def test_urlopen_no_file_protocol(self):
# see https://github.com/ytdl-org/youtube-dl/issues/8227
ydl = YDL()
@@ -1150,7 +1080,7 @@ class TestYoutubeDL(unittest.TestCase):
class _YDL(YDL):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
super(_YDL, self).__init__(*args, **kwargs)
def trouble(self, s, tb=None):
pass

test/test_YoutubeDLCookieJar.py
View File

@@ -1,10 +1,13 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import unicode_literals
import os
import re
import sys
import tempfile
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yt_dlp.utils import YoutubeDLCookieJar
@@ -17,7 +20,7 @@ class TestYoutubeDLCookieJar(unittest.TestCase):
tf = tempfile.NamedTemporaryFile(delete=False)
try:
cookiejar.save(filename=tf.name, ignore_discard=True, ignore_expires=True)
temp = tf.read().decode()
temp = tf.read().decode('utf-8')
self.assertTrue(re.search(
r'www\.foobar\.foobar\s+FALSE\s+/\s+TRUE\s+0\s+YoutubeDLExpiresEmpty\s+YoutubeDLExpiresEmptyValue', temp))
self.assertTrue(re.search(

test/test_aes.py
View File

@@ -1,30 +1,30 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import base64
from yt_dlp.aes import (
BLOCK_SIZE_BYTES,
aes_decrypt,
aes_encrypt,
aes_ecb_encrypt,
aes_ecb_decrypt,
aes_cbc_decrypt,
aes_cbc_decrypt_bytes,
aes_cbc_encrypt,
aes_ctr_decrypt,
aes_ctr_encrypt,
aes_decrypt,
aes_decrypt_text,
aes_ecb_decrypt,
aes_ecb_encrypt,
aes_encrypt,
aes_gcm_decrypt_and_verify,
aes_gcm_decrypt_and_verify_bytes,
aes_decrypt_text,
BLOCK_SIZE_BYTES,
)
from yt_dlp.dependencies import Cryptodome_AES
from yt_dlp.compat import compat_pycrypto_AES
from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
import base64
# the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'
@@ -45,7 +45,7 @@ class TestAES(unittest.TestCase):
data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
if Cryptodome_AES:
if compat_pycrypto_AES:
decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
@@ -75,25 +75,25 @@ class TestAES(unittest.TestCase):
decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
if Cryptodome_AES:
if compat_pycrypto_AES:
decrypted = aes_gcm_decrypt_and_verify_bytes(
data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
def test_decrypt_text(self):
password = intlist_to_bytes(self.key).decode()
password = intlist_to_bytes(self.key).decode('utf-8')
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae'
).decode()
).decode('utf-8')
decrypted = (aes_decrypt_text(encrypted, password, 16))
self.assertEqual(decrypted, self.secret_msg)
password = intlist_to_bytes(self.key).decode()
password = intlist_to_bytes(self.key).decode('utf-8')
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83'
).decode()
).decode('utf-8')
decrypted = (aes_decrypt_text(encrypted, password, 32))
self.assertEqual(decrypted, self.secret_msg)

test/test_age_restriction.py
View File

@@ -1,12 +1,13 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import is_download_test, try_rm
from test.helper import try_rm, is_download_test
from yt_dlp import YoutubeDL

View File

@@ -1,16 +1,22 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
# Allow direct execution
import collections
import os
import sys
import unittest
import collections
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import gettestcases
from yt_dlp.extractor import FacebookIE, YoutubeIE, gen_extractors
from yt_dlp.extractor import (
FacebookIE,
gen_extractors,
YoutubeIE,
)
class TestAllURLsMatching(unittest.TestCase):
@@ -75,11 +81,11 @@ class TestAllURLsMatching(unittest.TestCase):
url = tc['url']
for ie in ies:
if type(ie).__name__ in ('GenericIE', tc['name'] + 'IE'):
self.assertTrue(ie.suitable(url), f'{type(ie).__name__} should match URL {url!r}')
self.assertTrue(ie.suitable(url), '%s should match URL %r' % (type(ie).__name__, url))
else:
self.assertFalse(
ie.suitable(url),
f'{type(ie).__name__} should not match URL {url!r} . That URL belongs to {tc["name"]}.')
'%s should not match URL %r . That URL belongs to %s.' % (type(ie).__name__, url, tc['name']))
def test_keywords(self):
self.assertMatch(':ytsubs', ['youtube:subscriptions'])
@@ -114,7 +120,7 @@ class TestAllURLsMatching(unittest.TestCase):
for (ie_name, ie_list) in name_accu.items():
self.assertEqual(
len(ie_list), 1,
f'Multiple extractors with the same IE_NAME "{ie_name}" ({", ".join(ie_list)})')
'Multiple extractors with the same IE_NAME "%s" (%s)' % (ie_name, ', '.join(ie_list)))
if __name__ == '__main__':
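The assertions here boil down to asking each extractor whether it claims a given URL. Outside the test harness, roughly the same check looks like this (the URL is illustrative):

    from yt_dlp.extractor import gen_extractors

    url = 'https://www.youtube.com/watch?v=BaW_jenozKc'
    # Skip the catch-all generic extractor, as the test above does
    matching = [ie.IE_NAME for ie in gen_extractors()
                if ie.suitable(url) and ie.IE_NAME != 'generic']
    print(matching)  # e.g. ['youtube']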

View File

@@ -1,15 +1,18 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import unicode_literals
import shutil
# Allow direct execution
import os
import shutil
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL
from yt_dlp.cache import Cache
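The cache tests drive yt_dlp.cache.Cache through a throw-away FakeYDL; against a real YoutubeDL instance the same store/load cycle looks roughly like this (the cache directory, section and key names are arbitrary examples):

    from yt_dlp import YoutubeDL
    from yt_dlp.cache import Cache

    ydl = YoutubeDL({'cachedir': '/tmp/yt-dlp-cache-demo'})
    cache = Cache(ydl)

    cache.store('demo-section', 'some-key', {'answer': 42})      # written as JSON under cachedir
    print(cache.load('demo-section', 'some-key'))                 # {'answer': 42}
    print(cache.load('demo-section', 'missing', default='n/a'))  # 'n/a'

    cache.remove()  # deletes the whole cache directory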

View File

@@ -1,20 +1,26 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yt_dlp import compat
from yt_dlp.compat import (
compat_etree_fromstring,
compat_expanduser,
compat_getenv,
compat_setenv,
compat_etree_Element,
compat_etree_fromstring,
compat_expanduser,
compat_shlex_split,
compat_str,
compat_struct_unpack,
compat_urllib_parse_quote,
compat_urllib_parse_quote_plus,
compat_urllib_parse_unquote,
compat_urllib_parse_unquote_plus,
compat_urllib_parse_urlencode,
@@ -22,12 +28,6 @@ from yt_dlp.compat import (
class TestCompat(unittest.TestCase):
def test_compat_passthrough(self):
with self.assertWarns(DeprecationWarning):
compat.compat_basestring
compat.asyncio.events # Must not raise error
def test_compat_getenv(self):
test_str = 'тест'
compat_setenv('yt_dlp_COMPAT_GETENV', test_str)
@@ -42,12 +42,39 @@ class TestCompat(unittest.TestCase):
def test_compat_expanduser(self):
old_home = os.environ.get('HOME')
test_str = R'C:\Documents and Settings\тест\Application Data'
try:
compat_setenv('HOME', test_str)
self.assertEqual(compat_expanduser('~'), test_str)
finally:
compat_setenv('HOME', old_home or '')
test_str = r'C:\Documents and Settings\тест\Application Data'
compat_setenv('HOME', test_str)
self.assertEqual(compat_expanduser('~'), test_str)
compat_setenv('HOME', old_home or '')
def test_all_present(self):
import yt_dlp.compat
all_names = yt_dlp.compat.__all__
present_names = set(filter(
lambda c: '_' in c and not c.startswith('_'),
dir(yt_dlp.compat))) - set(['unicode_literals'])
self.assertEqual(all_names, sorted(present_names))
def test_compat_urllib_parse_quote(self):
self.assertEqual(compat_urllib_parse_quote('abc def'), 'abc%20def')
self.assertEqual(compat_urllib_parse_quote('/user/abc+def'), '/user/abc%2Bdef')
self.assertEqual(compat_urllib_parse_quote('/user/abc+def', safe='+'), '%2Fuser%2Fabc+def')
self.assertEqual(compat_urllib_parse_quote(''), '')
self.assertEqual(compat_urllib_parse_quote('%'), '%25')
self.assertEqual(compat_urllib_parse_quote('%', safe='%'), '%')
self.assertEqual(compat_urllib_parse_quote('津波'), '%E6%B4%A5%E6%B3%A2')
self.assertEqual(
compat_urllib_parse_quote('''<meta property="og:description" content="▁▂▃▄%▅▆▇█" />
%<a href="https://ar.wikipedia.org/wiki/تسونامي">%a''', safe='<>=":%/ \r\n'),
'''<meta property="og:description" content="%E2%96%81%E2%96%82%E2%96%83%E2%96%84%%E2%96%85%E2%96%86%E2%96%87%E2%96%88" />
%<a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B3%D9%88%D9%86%D8%A7%D9%85%D9%8A">%a''')
self.assertEqual(
compat_urllib_parse_quote('''(^◣_◢^)っ︻デ═一 ⇀ ⇀ ⇀ ⇀ ⇀ ↶%I%Break%25Things%''', safe='% '),
'''%28%5E%E2%97%A3_%E2%97%A2%5E%29%E3%81%A3%EF%B8%BB%E3%83%87%E2%95%90%E4%B8%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%86%B6%I%Break%25Things%''')
def test_compat_urllib_parse_quote_plus(self):
self.assertEqual(compat_urllib_parse_quote_plus('abc def'), 'abc+def')
self.assertEqual(compat_urllib_parse_quote_plus('/abc def'), '%2Fabc+def')
def test_compat_urllib_parse_unquote(self):
self.assertEqual(compat_urllib_parse_unquote('abc%20def'), 'abc def')
@@ -82,6 +109,17 @@ class TestCompat(unittest.TestCase):
self.assertEqual(compat_urllib_parse_urlencode([(b'abc', 'def')]), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode([(b'abc', b'def')]), 'abc=def')
def test_compat_shlex_split(self):
self.assertEqual(compat_shlex_split('-option "one two"'), ['-option', 'one two'])
self.assertEqual(compat_shlex_split('-option "one\ntwo" \n -flag'), ['-option', 'one\ntwo', '-flag'])
self.assertEqual(compat_shlex_split('-val 中文'), ['-val', '中文'])
def test_compat_etree_Element(self):
try:
compat_etree_Element.items
except AttributeError:
self.fail('compat_etree_Element is not a type')
def test_compat_etree_fromstring(self):
xml = '''
<root foo="bar" spam="中文">
@@ -90,7 +128,7 @@ class TestCompat(unittest.TestCase):
<foo><bar>spam</bar></foo>
</root>
'''
doc = compat_etree_fromstring(xml.encode())
doc = compat_etree_fromstring(xml.encode('utf-8'))
self.assertTrue(isinstance(doc.attrib['foo'], compat_str))
self.assertTrue(isinstance(doc.attrib['spam'], compat_str))
self.assertTrue(isinstance(doc.find('normal').text, compat_str))
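On Python 3 most of these compat_* names are thin wrappers around the standard library; a quick sanity check mirroring the assertions above:

    from yt_dlp.compat import (
        compat_etree_fromstring,
        compat_shlex_split,
        compat_urllib_parse_unquote,
    )

    print(compat_urllib_parse_unquote('abc%20def'))  # 'abc def'
    print(compat_shlex_split('-option "one two"'))   # ['-option', 'one two']

    doc = compat_etree_fromstring('<root spam="中文"><normal>中文</normal></root>'.encode())
    print(doc.attrib['spam'], doc.find('normal').text)  # attribute and text come back as str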

View File

@@ -6,24 +6,24 @@ from yt_dlp.cookies import (
LinuxChromeCookieDecryptor,
MacChromeCookieDecryptor,
WindowsChromeCookieDecryptor,
_get_linux_desktop_environment,
_LinuxDesktopEnvironment,
parse_safari_cookies,
pbkdf2_sha1,
_get_linux_desktop_environment,
_LinuxDesktopEnvironment,
)
class Logger:
def debug(self, message, *args, **kwargs):
def debug(self, message):
print(f'[verbose] {message}')
def info(self, message, *args, **kwargs):
def info(self, message):
print(message)
def warning(self, message, *args, **kwargs):
def warning(self, message, only_once=False):
self.error(message)
def error(self, message, *args, **kwargs):
def error(self, message):
raise Exception(message)
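This stub only needs to be duck-type compatible with the logger interface the cookie code calls into; the *args/**kwargs variants simply tolerate extras such as the only_once keyword. The public 'logger' option expects the same shape; a minimal sketch, with illustrative message text:

    import yt_dlp

    class MyLogger:
        # Only debug/warning/error are required; *args/**kwargs absorb extra
        # arguments such as only_once that some callers pass along
        def debug(self, msg, *args, **kwargs):
            pass

        def warning(self, msg, *args, **kwargs):
            print(f'WARN: {msg}')

        def error(self, msg, *args, **kwargs):
            print(f'ERROR: {msg}')

    with yt_dlp.YoutubeDL({'logger': MyLogger()}) as ydl:
        ydl.report_warning('routed through MyLogger.warning')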

Some files were not shown because too many files have changed in this diff