diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8f5df4ce4d..9a861bb4fd 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -343,14 +343,14 @@ jobs: brew uninstall --ignore-dependencies python3 python3 -m venv ~/yt-dlp-build-venv source ~/yt-dlp-build-venv/bin/activate - python3 devscripts/install_deps.py --only-optional-groups --include-group build - python3 devscripts/install_deps.py --print --include-group pyinstaller > requirements.txt + python3 devscripts/install_deps.py --omit-default --include-extra build + python3 devscripts/install_deps.py --print --include-extra pyinstaller > requirements.txt # We need to ignore wheels otherwise we break universal2 builds python3 -m pip install -U --no-binary :all: -r requirements.txt # We need to fuse our own universal2 wheels for curl_cffi python3 -m pip install -U 'delocate==0.11.0' mkdir curl_cffi_whls curl_cffi_universal2 - python3 devscripts/install_deps.py --print --only-optional-groups --include-group curl-cffi > requirements.txt + python3 devscripts/install_deps.py --print --omit-default --include-extra curl-cffi > requirements.txt for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do python3 -m pip download \ --only-binary=:all: \ @@ -484,11 +484,11 @@ jobs: mkdir /pyi-wheels python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}" python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}" - python devscripts/install_deps.py --only-optional-groups --include-group build + python devscripts/install_deps.py --omit-default --include-extra build if ("${Env:ARCH}" -eq "x86") { python devscripts/install_deps.py } else { - python devscripts/install_deps.py --include-group curl-cffi + python devscripts/install_deps.py --include-extra curl-cffi } - name: Prepare diff --git a/.github/workflows/challenge-tests.yml b/.github/workflows/challenge-tests.yml index 89895eb07b..7f1bcfb5b1 100644 
--- a/.github/workflows/challenge-tests.yml +++ b/.github/workflows/challenge-tests.yml @@ -67,7 +67,7 @@ jobs: unzip quickjs.zip - name: Install test requirements run: | - python ./devscripts/install_deps.py --print --only-optional-groups --include-group test > requirements.txt + python ./devscripts/install_deps.py --print --omit-default --include-extra test > requirements.txt python ./devscripts/install_deps.py --print -c certifi -c requests -c urllib3 -c yt-dlp-ejs >> requirements.txt python -m pip install -U -r requirements.txt - name: Run tests diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml index 3cb17f2b7d..f2d595ef73 100644 --- a/.github/workflows/core.yml +++ b/.github/workflows/core.yml @@ -63,7 +63,7 @@ jobs: with: python-version: ${{ matrix.python-version }} - name: Install test requirements - run: python ./devscripts/install_deps.py --include-group test --include-group curl-cffi + run: python ./devscripts/install_deps.py --include-extra test --include-extra curl-cffi - name: Run tests timeout-minutes: 15 continue-on-error: False diff --git a/.github/workflows/download.yml b/.github/workflows/download.yml index d075270d7b..343c720c5e 100644 --- a/.github/workflows/download.yml +++ b/.github/workflows/download.yml @@ -15,7 +15,7 @@ jobs: with: python-version: '3.10' - name: Install test requirements - run: python ./devscripts/install_deps.py --include-group dev + run: python ./devscripts/install_deps.py --include-extra dev - name: Run tests continue-on-error: true run: python ./devscripts/run_tests.py download @@ -42,7 +42,7 @@ jobs: with: python-version: ${{ matrix.python-version }} - name: Install test requirements - run: python ./devscripts/install_deps.py --include-group dev + run: python ./devscripts/install_deps.py --include-extra dev - name: Run tests continue-on-error: true run: python ./devscripts/run_tests.py download diff --git a/.github/workflows/quick-test.yml b/.github/workflows/quick-test.yml index 
a6e84b1d80..4476d018b7 100644 --- a/.github/workflows/quick-test.yml +++ b/.github/workflows/quick-test.yml @@ -15,7 +15,7 @@ jobs: with: python-version: '3.10' - name: Install test requirements - run: python ./devscripts/install_deps.py --only-optional-groups --include-group test + run: python ./devscripts/install_deps.py --omit-default --include-extra test - name: Run tests timeout-minutes: 15 run: | @@ -31,7 +31,7 @@ jobs: with: python-version: '3.10' - name: Install dev dependencies - run: python ./devscripts/install_deps.py --only-optional-groups --include-group static-analysis + run: python ./devscripts/install_deps.py --omit-default --include-extra static-analysis - name: Make lazy extractors run: python ./devscripts/make_lazy_extractors.py - name: Run ruff diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index afe1d384b4..2dfc7f2aa7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -180,7 +180,7 @@ jobs: - name: Install Requirements run: | sudo apt -y install pandoc man - python devscripts/install_deps.py --only-optional-groups --include-group build + python devscripts/install_deps.py --omit-default --include-extra build - name: Prepare env: diff --git a/.github/workflows/test-workflows.yml b/.github/workflows/test-workflows.yml index d39ab8814b..5fda040b2a 100644 --- a/.github/workflows/test-workflows.yml +++ b/.github/workflows/test-workflows.yml @@ -34,7 +34,7 @@ jobs: env: ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }} run: | - python -m devscripts.install_deps --only-optional-groups --include-group test + python -m devscripts.install_deps --omit-default --include-extra test sudo apt -y install shellcheck python -m pip install -U pyflakes curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 99f18b2f32..f161da3c1d 100644 --- a/CONTRIBUTING.md +++ 
b/CONTRIBUTING.md @@ -177,7 +177,7 @@ While it is strongly recommended to use `hatch` for yt-dlp development, if you a ```shell # To only install development dependencies: -$ python -m devscripts.install_deps --include-group dev +$ python -m devscripts.install_deps --include-extra dev # Or, for an editable install plus dev dependencies: $ python -m pip install -e ".[default,dev]" diff --git a/README.md b/README.md index e04d98045b..d42ebd539d 100644 --- a/README.md +++ b/README.md @@ -265,7 +265,7 @@ To build the standalone executable, you must have Python and `pyinstaller` (plus You can run the following commands: ``` -python devscripts/install_deps.py --include-group pyinstaller +python devscripts/install_deps.py --include-extra pyinstaller python devscripts/make_lazy_extractors.py python -m bundle.pyinstaller ``` diff --git a/bundle/docker/linux/build.sh b/bundle/docker/linux/build.sh index b30d40980e..d6d1791865 100755 --- a/bundle/docker/linux/build.sh +++ b/bundle/docker/linux/build.sh @@ -15,12 +15,12 @@ function venvpy { } INCLUDES=( - --include-group pyinstaller - --include-group secretstorage + --include-extra pyinstaller + --include-extra secretstorage ) if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then - INCLUDES+=(--include-group curl-cffi) + INCLUDES+=(--include-extra curl-cffi) fi runpy -m venv /yt-dlp-build-venv @@ -28,7 +28,7 @@ runpy -m venv /yt-dlp-build-venv source /yt-dlp-build-venv/bin/activate # Inside the venv we use venvpy instead of runpy venvpy -m ensurepip --upgrade --default-pip -venvpy -m devscripts.install_deps --only-optional-groups --include-group build +venvpy -m devscripts.install_deps --omit-default --include-extra build venvpy -m devscripts.install_deps "${INCLUDES[@]}" venvpy -m devscripts.make_lazy_extractors venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}" diff --git a/devscripts/install_deps.py b/devscripts/install_deps.py index 07c646a4c0..762213e2f9 100755 --- a/devscripts/install_deps.py +++ 
b/devscripts/install_deps.py @@ -25,16 +25,16 @@ def parse_args(): '-e', '--exclude-dependency', metavar='DEPENDENCY', action='append', help='exclude a dependency (can be used multiple times)') parser.add_argument( - '-i', '--include-group', metavar='GROUP', action='append', - help='include an optional dependency group (can be used multiple times)') + '-i', '--include-extra', metavar='EXTRA', action='append', + help='include an extra/optional-dependencies list (can be used multiple times)') parser.add_argument( '-c', '--cherry-pick', metavar='DEPENDENCY', action='append', help=( 'only include a specific dependency from the resulting dependency list ' '(can be used multiple times)')) parser.add_argument( - '-o', '--only-optional-groups', action='store_true', - help='omit default dependencies unless the "default" group is specified with --include-group') + '-o', '--omit-default', action='store_true', + help='omit the "default" extra unless it is explicitly included (it is included by default)') parser.add_argument( '-p', '--print', action='store_true', help='only print requirements to stdout') @@ -51,27 +51,27 @@ def uniq(arg) -> dict[str, None]: def main(): args = parse_args() project_table = parse_toml(read_file(args.input))['project'] - recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<group_name>[\w-]+)\]') - optional_groups = project_table['optional-dependencies'] + recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<extra_name>[\w-]+)\]') + extras = project_table['optional-dependencies'] excludes = uniq(args.exclude_dependency) only_includes = uniq(args.cherry_pick) - include_groups = uniq(args.include_group) + include_extras = uniq(args.include_extra) - def yield_deps(group): - for dep in group: + def yield_deps(extra): + for dep in extra: if mobj := recursive_pattern.fullmatch(dep): - yield from optional_groups.get(mobj.group('group_name'), ()) + yield from extras.get(mobj.group('extra_name'), ()) else: yield dep targets = {} - if not
args.only_optional_groups: + if not args.omit_default: # legacy: 'dependencies' is empty now targets.update(dict.fromkeys(project_table['dependencies'])) - targets.update(dict.fromkeys(yield_deps(optional_groups['default']))) + targets.update(dict.fromkeys(yield_deps(extras['default']))) - for include in filter(None, map(optional_groups.get, include_groups)): + for include in filter(None, map(extras.get, include_extras)): targets.update(dict.fromkeys(yield_deps(include))) def target_filter(target):