diff --git a/.github/ISSUE_TEMPLATE/1_broken_site.yml b/.github/ISSUE_TEMPLATE/1_broken_site.yml index cdbb86760..77b777d5a 100644 --- a/.github/ISSUE_TEMPLATE/1_broken_site.yml +++ b/.github/ISSUE_TEMPLATE/1_broken_site.yml @@ -1,5 +1,5 @@ -name: Broken site -description: Report error in a supported site +name: Broken site support +description: Report issue with yt-dlp on a supported site labels: [triage, site-bug] body: - type: checkboxes @@ -16,7 +16,7 @@ body: description: | Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp: options: - - label: I'm reporting that a **supported** site is broken + - label: I'm reporting that yt-dlp is broken on a **supported** site required: true - label: I've verified that I'm running yt-dlp version **2023.03.04** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit) required: true diff --git a/.github/ISSUE_TEMPLATE/4_bug_report.yml b/.github/ISSUE_TEMPLATE/4_bug_report.yml index bf1d97bba..122dda4f2 100644 --- a/.github/ISSUE_TEMPLATE/4_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/4_bug_report.yml @@ -1,4 +1,4 @@ -name: Bug report +name: Core bug report description: Report a bug unrelated to any particular site or extractor labels: [triage, bug] body: diff --git a/.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml b/.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml index 1f6f92634..a51db789f 100644 --- a/.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml +++ b/.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml @@ -1,5 +1,5 @@ -name: Broken site -description: Report error in a supported site +name: Broken site support +description: Report issue with yt-dlp on a supported site labels: [triage, site-bug] body: %(no_skip)s @@ -10,7 +10,7 @@ body: description: | Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp: options: - - label: I'm reporting that a **supported** site is broken + - label: I'm reporting that yt-dlp is broken on a **supported** site required: true - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit) required: true diff --git a/.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml b/.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml index 90f59e70b..9ab490267 100644 --- a/.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml +++ b/.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml @@ -1,4 +1,4 @@ -name: Bug report +name: Core bug report description: Report a bug unrelated to any particular site or extractor labels: [triage, bug] body: diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index c4d3e812e..cbed82173 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -40,4 +40,10 @@ ### What is the purpose of your *pull request*? - [ ] Core bug fix/improvement - [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes)) + + +
<details open><summary>Copilot Summary</summary> + +copilot:all +</details>
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index aa11c6194..ac0cfdf7c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -41,7 +41,7 @@ on: required: true type: string channel: - description: Update channel (stable/nightly) + description: Update channel (stable/nightly/...) required: true default: stable type: string @@ -127,6 +127,19 @@ jobs: mv ./dist/yt-dlp_linux ./yt-dlp_linux mv ./dist/yt-dlp_linux.zip ./yt-dlp_linux.zip + - name: Verify --update-to + if: vars.UPDATE_TO_VERIFICATION + run: | + binaries=("yt-dlp" "yt-dlp_linux") + for binary in "${binaries[@]}"; do + chmod +x ./${binary} + cp ./${binary} ./${binary}_downgraded + version="$(./${binary} --version)" + ./${binary}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04 + downgraded_version="$(./${binary}_downgraded --version)" + [[ "$version" != "$downgraded_version" ]] + done + - name: Upload artifacts uses: actions/upload-artifact@v3 with: @@ -176,6 +189,16 @@ jobs: python3.8 devscripts/make_lazy_extractors.py python3.8 pyinst.py + if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then + arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}" + chmod +x ./dist/yt-dlp_linux_${arch} + cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded + version="$(./dist/yt-dlp_linux_${arch} --version)" + ./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04 + downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)" + [[ "$version" != "$downgraded_version" ]] + fi + - name: Upload artifacts uses: actions/upload-artifact@v3 with: @@ -188,21 +211,33 @@ jobs: steps: - uses: actions/checkout@v3 - # NB: In order to create a universal2 application, the version of python3 in /usr/bin has to be used + # NB: Building universal2 does not work with python from actions/setup-python - name: Install Requirements run: | brew install coreutils - /usr/bin/python3 -m pip install -U --user pip Pyinstaller==5.8 -r requirements.txt + python3 -m pip install -U --user pip setuptools wheel + # We need to ignore wheels otherwise we break universal2 builds + python3 -m pip install -U --user --no-binary :all: Pyinstaller -r requirements.txt - name: Prepare run: | - /usr/bin/python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }} - /usr/bin/python3 devscripts/make_lazy_extractors.py + python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }} + python3 devscripts/make_lazy_extractors.py - name: Build run: | - /usr/bin/python3 pyinst.py --target-architecture universal2 --onedir + python3 pyinst.py --target-architecture universal2 --onedir (cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .) 
- /usr/bin/python3 pyinst.py --target-architecture universal2 + python3 pyinst.py --target-architecture universal2 + + - name: Verify --update-to + if: vars.UPDATE_TO_VERIFICATION + run: | + chmod +x ./dist/yt-dlp_macos + cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded + version="$(./dist/yt-dlp_macos --version)" + ./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04 + downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)" + [[ "$version" != "$downgraded_version" ]] - name: Upload artifacts uses: actions/upload-artifact@v3 @@ -232,7 +267,8 @@ jobs: - name: Install Requirements run: | brew install coreutils - python3 -m pip install -U --user pip Pyinstaller -r requirements.txt + python3 -m pip install -U --user pip setuptools wheel + python3 -m pip install -U --user Pyinstaller -r requirements.txt - name: Prepare run: | @@ -243,6 +279,16 @@ jobs: python3 pyinst.py mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy + - name: Verify --update-to + if: vars.UPDATE_TO_VERIFICATION + run: | + chmod +x ./dist/yt-dlp_macos_legacy + cp ./dist/yt-dlp_macos_legacy ./dist/yt-dlp_macos_legacy_downgraded + version="$(./dist/yt-dlp_macos_legacy --version)" + ./dist/yt-dlp_macos_legacy_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04 + downgraded_version="$(./dist/yt-dlp_macos_legacy_downgraded --version)" + [[ "$version" != "$downgraded_version" ]] + - name: Upload artifacts uses: actions/upload-artifact@v3 with: @@ -275,6 +321,19 @@ jobs: python pyinst.py --onedir Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip + - name: Verify --update-to + if: vars.UPDATE_TO_VERIFICATION + run: | + foreach ($name in @("yt-dlp","yt-dlp_min")) { + Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe" + $version = & "./dist/${name}.exe" --version + & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04 + $downgraded_version = & "./dist/${name}_downgraded.exe" --version + if ($version -eq $downgraded_version) { + exit 1 + } + } + - name: Upload artifacts uses: actions/upload-artifact@v3 with: @@ -306,6 +365,19 @@ jobs: run: | python pyinst.py + - name: Verify --update-to + if: vars.UPDATE_TO_VERIFICATION + run: | + foreach ($name in @("yt-dlp_x86")) { + Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe" + $version = & "./dist/${name}.exe" --version + & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04 + $downgraded_version = & "./dist/${name}_downgraded.exe" --version + if ($version -eq $downgraded_version) { + exit 1 + } + } + - name: Upload artifacts uses: actions/upload-artifact@v3 with: @@ -313,7 +385,7 @@ jobs: dist/yt-dlp_x86.exe meta_files: - if: inputs.meta_files && always() + if: inputs.meta_files && always() && !cancelled() needs: - unix - linux_arm diff --git a/.github/workflows/potential-duplicates.yml b/.github/workflows/potential-duplicates.yml new file mode 100644 index 000000000..1521ae20c --- /dev/null +++ b/.github/workflows/potential-duplicates.yml @@ -0,0 +1,20 @@ +name: Potential Duplicates +on: + issues: + types: [opened, edited] + +jobs: + run: + runs-on: ubuntu-latest + steps: + - uses: wow-actions/potential-duplicates@v1 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + label: potential-duplicate + state: all + threshold: 0.7 + comment: | + This issue is potentially a duplicate of one of the following issues: + {{#issues}} + - #{{ number }} ({{ accuracy }}%) + {{/issues}} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 8a1bd9a01..9ebf54e7f 
100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -2,16 +2,20 @@ name: Publish on: workflow_call: inputs: - nightly: - default: false - required: false - type: boolean + channel: + default: stable + required: true + type: string version: required: true type: string target_commitish: required: true type: string + prerelease: + default: false + required: true + type: boolean secrets: ARCHIVE_REPO_TOKEN: required: false @@ -34,16 +38,27 @@ jobs: - name: Generate release notes run: | + printf '%s' \ + '[![Installation](https://img.shields.io/badge/-Which%20file%20should%20I%20download%3F-white.svg?style=for-the-badge)]' \ + '(https://github.com/yt-dlp/yt-dlp#installation "Installation instructions") ' \ + '[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]' \ + '(https://github.com/yt-dlp/yt-dlp/tree/2023.03.04#readme "Documentation") ' \ + '[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]' \ + '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \ + '[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]' \ + '(https://discord.gg/H5MNcFW63r "Discord") ' \ + ${{ inputs.channel != 'nightly' && '"[![Nightly](https://img.shields.io/badge/Get%20nightly%20builds-purple.svg?style=for-the-badge)]" \ + "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\")"' || '' }} \ + > ./RELEASE_NOTES + printf '\n\n' >> ./RELEASE_NOTES cat >> ./RELEASE_NOTES << EOF #### A description of the various files are in the [README](https://github.com/yt-dlp/yt-dlp#release-files) --- -
<details><summary><h3>Changelog</h3></summary> - $(python ./devscripts/make_changelog.py -vv) - </details>
+ $(python ./devscripts/make_changelog.py -vv --collapsible) EOF - echo "**This is an automated nightly pre-release build**" >> ./PRERELEASE_NOTES - cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES - echo "Generated from: https://github.com/${{ github.repository }}/commit/${{ inputs.target_commitish }}" >> ./ARCHIVE_NOTES + printf '%s\n\n' '**This is an automated nightly pre-release build**' >> ./NIGHTLY_NOTES + cat ./RELEASE_NOTES >> ./NIGHTLY_NOTES + printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ inputs.target_commitish }}' >> ./ARCHIVE_NOTES cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES - name: Archive nightly release @@ -51,7 +66,7 @@ jobs: GH_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }} GH_REPO: ${{ vars.ARCHIVE_REPO }} if: | - inputs.nightly && env.GH_TOKEN != '' && env.GH_REPO != '' + inputs.channel == 'nightly' && env.GH_TOKEN != '' && env.GH_REPO != '' run: | gh release create \ --notes-file ARCHIVE_NOTES \ @@ -60,7 +75,7 @@ jobs: artifact/* - name: Prune old nightly release - if: inputs.nightly && !vars.ARCHIVE_REPO + if: inputs.channel == 'nightly' && !vars.ARCHIVE_REPO env: GH_TOKEN: ${{ github.token }} run: | @@ -68,14 +83,15 @@ jobs: git tag --delete "nightly" || true sleep 5 # Enough time to cover deletion race condition - - name: Publish release${{ inputs.nightly && ' (nightly)' || '' }} + - name: Publish release${{ inputs.channel == 'nightly' && ' (nightly)' || '' }} env: GH_TOKEN: ${{ github.token }} - if: (inputs.nightly && !vars.ARCHIVE_REPO) || !inputs.nightly + if: (inputs.channel == 'nightly' && !vars.ARCHIVE_REPO) || inputs.channel != 'nightly' run: | gh release create \ - --notes-file ${{ inputs.nightly && 'PRE' || '' }}RELEASE_NOTES \ + --notes-file ${{ inputs.channel == 'nightly' && 'NIGHTLY_NOTES' || 'RELEASE_NOTES' }} \ --target ${{ inputs.target_commitish }} \ - --title "yt-dlp ${{ inputs.nightly && 'nightly ' || '' }}${{ inputs.version }}" \ - ${{ inputs.nightly && '--prerelease "nightly"' || inputs.version }} \ + --title "yt-dlp ${{ inputs.channel == 'nightly' && 'nightly ' || '' }}${{ inputs.version }}" \ + ${{ inputs.prerelease && '--prerelease' || '' }} \ + ${{ inputs.channel == 'nightly' && '"nightly"' || inputs.version }} \ artifact/* diff --git a/.github/workflows/release-nightly.yml b/.github/workflows/release-nightly.yml index d4f01ab64..543e2e6f7 100644 --- a/.github/workflows/release-nightly.yml +++ b/.github/workflows/release-nightly.yml @@ -46,6 +46,7 @@ jobs: permissions: contents: write with: - nightly: true + channel: nightly + prerelease: true version: ${{ needs.prepare.outputs.version }} target_commitish: ${{ github.sha }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e07fc0c07..ada508be8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,5 +1,22 @@ name: Release -on: workflow_dispatch +on: + workflow_dispatch: + inputs: + version: + description: Version tag (YYYY.MM.DD[.REV]) + required: false + default: '' + type: string + channel: + description: Update channel (stable/nightly/...) 
+ required: false + default: '' + type: string + prerelease: + description: Pre-release + default: false + type: boolean + permissions: contents: read @@ -9,8 +26,9 @@ jobs: contents: write runs-on: ubuntu-latest outputs: + channel: ${{ steps.set_channel.outputs.channel }} version: ${{ steps.update_version.outputs.version }} - head_sha: ${{ steps.push_release.outputs.head_sha }} + head_sha: ${{ steps.get_target.outputs.head_sha }} steps: - uses: actions/checkout@v3 @@ -21,10 +39,18 @@ jobs: with: python-version: "3.10" + - name: Set channel + id: set_channel + run: | + CHANNEL="${{ github.repository == 'yt-dlp/yt-dlp' && 'stable' || github.repository }}" + echo "channel=${{ inputs.channel || '$CHANNEL' }}" > "$GITHUB_OUTPUT" + - name: Update version id: update_version run: | - python devscripts/update-version.py ${{ vars.PUSH_VERSION_COMMIT == '' && '"$(date -u +"%H%M%S")"' || '' }} | \ + REVISION="${{ vars.PUSH_VERSION_COMMIT == '' && '$(date -u +"%H%M%S")' || '' }}" + REVISION="${{ inputs.prerelease && '$(date -u +"%H%M%S")' || '$REVISION' }}" + python devscripts/update-version.py ${{ inputs.version || '$REVISION' }} | \ grep -Po "version=\d+\.\d+\.\d+(\.\d+)?" >> "$GITHUB_OUTPUT" - name: Update documentation @@ -39,6 +65,7 @@ jobs: - name: Push to release id: push_release + if: ${{ !inputs.prerelease }} run: | git config --global user.name github-actions git config --global user.email github-actions@example.com @@ -46,14 +73,30 @@ jobs: git commit -m "Release ${{ steps.update_version.outputs.version }}" \ -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl" git push origin --force ${{ github.event.ref }}:release + + - name: Get target commitish + id: get_target + run: | echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT" - name: Update master - if: vars.PUSH_VERSION_COMMIT != '' + if: vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease run: git push origin ${{ github.event.ref }} - publish_pypi_homebrew: + build: needs: prepare + uses: ./.github/workflows/build.yml + with: + version: ${{ needs.prepare.outputs.version }} + channel: ${{ needs.prepare.outputs.channel }} + permissions: + contents: read + packages: write # For package cache + secrets: + GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }} + + publish_pypi_homebrew: + needs: [prepare, build] runs-on: ubuntu-latest steps: @@ -77,7 +120,7 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} - if: env.TWINE_PASSWORD != '' + if: env.TWINE_PASSWORD != '' && !inputs.prerelease run: | rm -rf dist/* make pypi-files @@ -89,7 +132,7 @@ jobs: env: BREW_TOKEN: ${{ secrets.BREW_TOKEN }} PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} - if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' + if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease uses: actions/checkout@v3 with: repository: yt-dlp/homebrew-taps @@ -100,7 +143,7 @@ jobs: env: BREW_TOKEN: ${{ secrets.BREW_TOKEN }} PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} - if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' + if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease run: | python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.prepare.outputs.version }}" git -C taps/ config user.name github-actions @@ -108,22 +151,13 @@ jobs: git -C taps/ commit -am 'yt-dlp: ${{ needs.prepare.outputs.version }}' git -C taps/ push - build: - needs: prepare - uses: ./.github/workflows/build.yml - with: - version: ${{ needs.prepare.outputs.version }} - permissions: - contents: read - packages: write # For package 
cache - secrets: - GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }} - publish: needs: [prepare, build] uses: ./.github/workflows/publish.yml permissions: contents: write with: + channel: ${{ needs.prepare.outputs.channel }} + prerelease: ${{ inputs.prerelease }} version: ${{ needs.prepare.outputs.version }} target_commitish: ${{ needs.prepare.outputs.head_sha }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ae2c45423..a8587fe92 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -79,7 +79,7 @@ ### Are you using the latest version? ### Is the issue already documented? -Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, feel free to write something along the lines of "This affects me as well, with version 2021.01.01. Here is some more information on the issue: ...". While some issues may be old, a new post into them often spurs rapid activity. +Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, subscribe to it to be notified when there is any progress. Unless you have something useful to add to the conversation, please refrain from commenting. Additionally, it is also helpful to see if the issue has already been documented in the [youtube-dl issue tracker](https://github.com/ytdl-org/youtube-dl/issues). If similar issues have already been reported in youtube-dl (but not in our issue tracker), links to them can be included in your issue report here. @@ -246,7 +246,7 @@ ## yt-dlp coding conventions This section introduces a guide lines for writing idiomatic, robust and future-proof extractor code. -Extractors are very fragile by nature since they depend on the layout of the source data provided by 3rd party media hosters out of your control and this layout tends to change. As an extractor implementer your task is not only to write code that will extract media links and metadata correctly but also to minimize dependency on the source's layout and even to make the code foresee potential future changes and be ready for that. This is important because it will allow the extractor not to break on minor layout changes thus keeping old yt-dlp versions working. Even though this breakage issue may be easily fixed by a new version of yt-dlp, this could take some time, during which the the extractor will remain broken. +Extractors are very fragile by nature since they depend on the layout of the source data provided by 3rd party media hosters out of your control and this layout tends to change. As an extractor implementer your task is not only to write code that will extract media links and metadata correctly but also to minimize dependency on the source's layout and even to make the code foresee potential future changes and be ready for that. This is important because it will allow the extractor not to break on minor layout changes thus keeping old yt-dlp versions working. Even though this breakage issue may be easily fixed by a new version of yt-dlp, this could take some time, during which the extractor will remain broken.
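As a concrete illustration of that advice, here is a minimal sketch of the defensive style the paragraph above describes (the site, URL pattern and `window.__PLAYER__` variable are invented for illustration; only the generic `InfoExtractor` and `traverse_obj` helpers are real): prefer structured data, keep optional lookups non-fatal, and fall back across several sources rather than depending on a single layout detail.

```python
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.utils import traverse_obj


class ExampleIE(InfoExtractor):  # hypothetical extractor, for illustration only
    _VALID_URL = r'https?://(?:www\.)?example\.com/watch/(?P<id>\d+)'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # Prefer embedded structured data over scraping markup, and keep the
        # lookup non-fatal so a minor layout change does not break extraction
        player = self._search_json(
            r'window\.__PLAYER__\s*=', webpage, 'player data', video_id, fatal=False) or {}

        return {
            'id': video_id,
            # Fall back through several sources instead of relying on a single one
            'title': traverse_obj(player, ('video', 'title')) or self._og_search_title(webpage),
            'description': self._og_search_description(webpage, default=None),
            'url': traverse_obj(player, ('video', 'streams', 0, 'url')),
        }
```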
### Mandatory and optional metafields diff --git a/Collaborators.md b/Collaborators.md index 71baf5080..a0976dd8c 100644 --- a/Collaborators.md +++ b/Collaborators.md @@ -8,7 +8,7 @@ # Collaborators ## [pukkandan](https://github.com/pukkandan) [![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/pukkandan) -[![gh-sponsor](https://img.shields.io/badge/_-Github-red.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/pukkandan) +[![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/pukkandan) * Owner of the fork @@ -26,7 +26,7 @@ ## [shirt](https://github.com/shirt-dev) ## [coletdjnz](https://github.com/coletdjnz) -[![gh-sponsor](https://img.shields.io/badge/_-Github-red.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/coletdjnz) +[![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/coletdjnz) * Improved plugin architecture * YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements @@ -44,7 +44,7 @@ ## [Ashish0804](https://github.com/Ashish0804) [Inactive] * Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc -## [Lesmiscore](https://github.com/Lesmiscore) (nao20010128nao) +## [Lesmiscore](https://github.com/Lesmiscore) **Bitcoin**: bc1qfd02r007cutfdjwjmyy9w23rjvtls6ncve7r3s **Monacoin**: mona1q3tf7dzvshrhfe3md379xtvt2n22duhglv5dskr @@ -64,7 +64,7 @@ ## [bashonly](https://github.com/bashonly) ## [Grub4K](https://github.com/Grub4K) -[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/Grub4K) [![gh-sponsor](https://img.shields.io/badge/_-Github-red.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/Grub4K) +[![ko-fi](https://img.shields.io/badge/_-Ko--fi-red.svg?logo=kofi&labelColor=555555&style=for-the-badge)](https://ko-fi.com/Grub4K) [![gh-sponsor](https://img.shields.io/badge/_-Github-white.svg?logo=github&labelColor=555555&style=for-the-badge)](https://github.com/sponsors/Grub4K) * `--update-to`, automated release, nightly builds * Rework internals like `traverse_obj`, various core refactors and bugs fixes diff --git a/Makefile b/Makefile index d5d47629b..b1ac0e7d6 100644 --- a/Makefile +++ b/Makefile @@ -74,7 +74,7 @@ offlinetest: codetest $(PYTHON) -m pytest -k "not download" # XXX: This is hard to maintain -CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat yt_dlp/dependencies +CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat yt_dlp/compat/urllib yt_dlp/utils yt_dlp/dependencies yt-dlp: yt_dlp/*.py yt_dlp/*/*.py mkdir -p zip for d in $(CODE_FOLDERS) ; do \ diff --git a/README.md b/README.md index c1f34235d..3d89c0af9 100644 --- a/README.md +++ b/README.md @@ -85,7 +85,7 @@ # NEW FEATURES * **Merged with animelover1984/youtube-dl**: You get most of the features and improvements from [animelover1984/youtube-dl](https://github.com/animelover1984/youtube-dl) including `--write-comments`, `BiliBiliSearch`, `BilibiliChannel`, Embedding thumbnail in mp4/ogg/opus, playlist infojson etc. Note that NicoNico livestreams are not available. 
See [#31](https://github.com/yt-dlp/yt-dlp/pull/31) for details. * **YouTube improvements**: - * Supports Clips, Stories (`ytstories:`), Search (including filters)**\***, YouTube Music Search, Channel-specific search, Search prefixes (`ytsearch:`, `ytsearchdate:`)**\***, Mixes, YouTube Music Albums/Channels ([except self-uploaded music](https://github.com/yt-dlp/yt-dlp/issues/723)), and Feeds (`:ytfav`, `:ytwatchlater`, `:ytsubs`, `:ythistory`, `:ytrec`, `:ytnotif`) + * Supports Clips, Stories (`ytstories:`), Search (including filters)**\***, YouTube Music Search, Channel-specific search, Search prefixes (`ytsearch:`, `ytsearchdate:`)**\***, Mixes, and Feeds (`:ytfav`, `:ytwatchlater`, `:ytsubs`, `:ythistory`, `:ytrec`, `:ytnotif`) * Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\*** * Supports some (but not all) age-gated content without cookies * Download livestreams from the start using `--live-from-start` (*experimental*) @@ -179,13 +179,13 @@ # INSTALLATION [![All versions](https://img.shields.io/badge/-All_Versions-lightgrey.svg?style=for-the-badge)](https://github.com/yt-dlp/yt-dlp/releases) -You can install yt-dlp using [the binaries](#release-files), [PIP](https://pypi.org/project/yt-dlp) or one using a third-party package manager. See [the wiki](https://github.com/yt-dlp/yt-dlp/wiki/Installation) for detailed instructions +You can install yt-dlp using [the binaries](#release-files), [pip](https://pypi.org/project/yt-dlp) or one using a third-party package manager. See [the wiki](https://github.com/yt-dlp/yt-dlp/wiki/Installation) for detailed instructions ## UPDATE You can use `yt-dlp -U` to update if you are using the [release binaries](#release-files) -If you [installed with PIP](https://github.com/yt-dlp/yt-dlp/wiki/Installation#with-pip), simply re-run the same command that was used to install the program +If you [installed with pip](https://github.com/yt-dlp/yt-dlp/wiki/Installation#with-pip), simply re-run the same command that was used to install the program For other third-party package managers, see [the wiki](https://github.com/yt-dlp/yt-dlp/wiki/Installation#third-party-package-managers) or refer their documentation @@ -196,12 +196,15 @@ ## UPDATE The `nightly` channel has releases built after each push to the master branch, and will have the most recent fixes and additions, but also have more risk of regressions. They are available in [their own repo](https://github.com/yt-dlp/yt-dlp-nightly-builds/releases). When using `--update`/`-U`, a release binary will only update to its current channel. -This release channel can be changed by using the `--update-to` option. `--update-to` can also be used to upgrade or downgrade to specific tags from a channel. +`--update-to CHANNEL` can be used to switch to a different channel when a newer version is available. `--update-to [CHANNEL@]TAG` can also be used to upgrade or downgrade to specific tags from a channel. + +You may also use `--update-to <repository>` (`<owner>/<repository>`) to update to a channel on a completely different repository. Be careful with what repository you are updating to though, there is no verification done for binaries from different repositories.
Example usage: * `yt-dlp --update-to nightly` change to `nightly` channel and update to its latest release * `yt-dlp --update-to stable@2023.02.17` upgrade/downgrade to release to `stable` channel tag `2023.02.17` * `yt-dlp --update-to 2023.01.06` upgrade/downgrade to tag `2023.01.06` if it exists on the current channel +* `yt-dlp --update-to example/yt-dlp@2023.03.01` upgrade/downgrade to the release from the `example/yt-dlp` repository, tag `2023.03.01` ## RELEASE FILES @@ -360,10 +363,10 @@ ## General Options: -U, --update Update this program to the latest version --no-update Do not check for updates (default) --update-to [CHANNEL]@[TAG] Upgrade/downgrade to a specific version. - CHANNEL and TAG defaults to "stable" and - "latest" respectively if omitted; See - "UPDATE" for details. Supported channels: - stable, nightly + CHANNEL can be a repository as well. CHANNEL + and TAG default to "stable" and "latest" + respectively if omitted; See "UPDATE" for + details. Supported channels: stable, nightly -i, --ignore-errors Ignore download and postprocessing errors. The download will be considered successful even if the postprocessing fails @@ -409,7 +412,8 @@ ## General Options: configuration files --flat-playlist Do not extract the videos of a playlist, only list them - --no-flat-playlist Extract the videos of a playlist + --no-flat-playlist Fully extract the videos of a playlist + (default) --live-from-start Download livestreams from the start. Currently only supported for YouTube (Experimental) @@ -421,8 +425,12 @@ ## General Options: --no-wait-for-video Do not wait for scheduled streams (default) --mark-watched Mark videos watched (even with --simulate) --no-mark-watched Do not mark videos watched (default) - --no-colors Do not emit color codes in output (Alias: - --no-colours) + --color [STREAM:]POLICY Whether to emit color codes in output, + optionally prefixed by the STREAM (stdout or + stderr) to apply the setting to. Can be one + of "always", "auto" (default), "never", or + "no_color" (use non color terminal + sequences). Can be used multiple times --compat-options OPTS Options that can help keep compatibility with youtube-dl or youtube-dlc configurations by reverting some of the @@ -465,9 +473,9 @@ ## Geo-restriction: downloading --xff VALUE How to fake X-Forwarded-For HTTP header to try bypassing geographic restriction. One of - "default" (Only when known to be useful), - "never", a two-letter ISO 3166-2 country - code, or an IP block in CIDR notation + "default" (only when known to be useful), + "never", an IP block in CIDR notation, or a + two-letter ISO 3166-2 country code ## Video Selection: -I, --playlist-items ITEM_SPEC Comma separated playlist_index of the items @@ -514,7 +522,7 @@ ## Video Selection: dogs" (caseless). Use "--match-filter -" to interactively ask whether to download each video - --no-match-filter Do not use any --match-filter (default) + --no-match-filters Do not use any --match-filter (default) --break-match-filters FILTER Same as "--match-filters" but stops the download process when a video is rejected --no-break-match-filters Do not use any --break-match-filters (default) @@ -1709,7 +1717,7 @@ # MODIFYING METADATA This option also has a few special uses: -* You can download an additional URL based on the metadata of the currently downloaded video. To do this, set the field `additional_urls` to the URL that you want to download. E.g. 
`--parse-metadata "description:(?P<additional_urls>https?://www\.vimeo\.com/\d+)` will download the first vimeo video found in the description +* You can download an additional URL based on the metadata of the currently downloaded video. To do this, set the field `additional_urls` to the URL that you want to download. E.g. `--parse-metadata "description:(?P<additional_urls>https?://www\.vimeo\.com/\d+)"` will download the first vimeo video found in the description * You can use this to change the metadata that is embedded in the media file. To do this, set the value of the corresponding field with a `meta_` prefix. For example, any value you set to `meta_description` field will be added to the `description` field in the file - you can use this to set a different "description" and "synopsis". To modify the metadata of individual streams, use the `meta<n>_` prefix (e.g. `meta1_language`). Any value set to the `meta_` field will overwrite all default values. @@ -1835,6 +1843,12 @@ #### rokfinchannel #### twitter * `legacy_api`: Force usage of the legacy Twitter API instead of the GraphQL API for tweet extraction. Has no effect if login cookies are passed +### wrestleuniverse +* `device_id`: UUID value assigned by the website and used to enforce device limits for paid livestream content. Can be found in browser local storage + +#### twitchstream (Twitch) +* `client_id`: Client ID value to be sent with GraphQL requests, e.g. `twitchstream:client_id=kimne78kx3ncx6brgo4mv6wki5h1ko` + **Note**: These options may be changed/removed in the future without concern for backward compatibility @@ -1880,7 +1894,7 @@ ## Installing Plugins * **System Plugins** * `/etc/yt-dlp/plugins/<package name>/yt_dlp_plugins/` * `/etc/yt-dlp-plugins/<package name>/yt_dlp_plugins/` -2. **Executable location**: Plugin packages can similarly be installed in a `yt-dlp-plugins` directory under the executable location: +2. **Executable location**: Plugin packages can similarly be installed in a `yt-dlp-plugins` directory under the executable location (recommended for portable installations): * Binary: where `<root-dir>/yt-dlp.exe`, `<root-dir>/yt-dlp-plugins/<package name>/yt_dlp_plugins/` * Source: where `<root-dir>/yt_dlp/__main__.py`, `<root-dir>/yt-dlp-plugins/<package name>/yt_dlp_plugins/` @@ -2068,7 +2082,7 @@ #### Use a custom format selector ```python import yt_dlp -URL = ['https://www.youtube.com/watch?v=BaW_jenozKc'] +URLS = ['https://www.youtube.com/watch?v=BaW_jenozKc'] def format_selector(ctx): """ Select the best video and the best audio that won't result in an mkv.
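# A minimal sketch of how such a callable selector is wired up through the Python API,
# assuming (as the example above describes) that the `format` option accepts a callable
# which receives a ctx dict whose 'formats' list is sorted worst to best.
import yt_dlp

URLS = ['https://www.youtube.com/watch?v=BaW_jenozKc']


def format_selector(ctx):
    # Yield the single best pre-merged format instead of building a video+audio merge
    yield ctx['formats'][-1]


# Passing a callable instead of a format string hands format selection to user code
with yt_dlp.YoutubeDL({'format': format_selector}) as ydl:
    ydl.download(URLS)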
@@ -2141,6 +2155,7 @@ #### Redundant options --playlist-end NUMBER -I :NUMBER --playlist-reverse -I ::-1 --no-playlist-reverse Default + --no-colors --color no_color #### Not recommended diff --git a/devscripts/cli_to_api.py b/devscripts/cli_to_api.py new file mode 100644 index 000000000..b8b7cbcf1 --- /dev/null +++ b/devscripts/cli_to_api.py @@ -0,0 +1,48 @@ +# Allow direct execution +import os +import sys + +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +import yt_dlp +import yt_dlp.options + +create_parser = yt_dlp.options.create_parser + + +def parse_patched_options(opts): + patched_parser = create_parser() + patched_parser.defaults.update({ + 'ignoreerrors': False, + 'retries': 0, + 'fragment_retries': 0, + 'extract_flat': False, + 'concat_playlist': 'never', + }) + yt_dlp.options.__dict__['create_parser'] = lambda: patched_parser + try: + return yt_dlp.parse_options(opts) + finally: + yt_dlp.options.__dict__['create_parser'] = create_parser + + +default_opts = parse_patched_options([]).ydl_opts + + +def cli_to_api(opts, cli_defaults=False): + opts = (yt_dlp.parse_options if cli_defaults else parse_patched_options)(opts).ydl_opts + + diff = {k: v for k, v in opts.items() if default_opts[k] != v} + if 'postprocessors' in diff: + diff['postprocessors'] = [pp for pp in diff['postprocessors'] + if pp not in default_opts['postprocessors']] + return diff + + +if __name__ == '__main__': + from pprint import pprint + + print('\nThe arguments passed translate to:\n') + pprint(cli_to_api(sys.argv[1:])) + print('\nCombining these with the CLI defaults gives:\n') + pprint(cli_to_api(sys.argv[1:], True)) diff --git a/devscripts/make_changelog.py b/devscripts/make_changelog.py index b159bc1b9..1b7e251ee 100644 --- a/devscripts/make_changelog.py +++ b/devscripts/make_changelog.py @@ -26,7 +26,6 @@ class CommitGroup(enum.Enum): - UPSTREAM = None PRIORITY = 'Important' CORE = 'Core' EXTRACTOR = 'Extractor' @@ -34,6 +33,11 @@ class CommitGroup(enum.Enum): POSTPROCESSOR = 'Postprocessor' MISC = 'Misc.' + @classmethod + @property + def ignorable_prefixes(cls): + return ('core', 'downloader', 'extractor', 'misc', 'postprocessor', 'upstream') + @classmethod @lru_cache def commit_lookup(cls): @@ -41,7 +45,6 @@ def commit_lookup(cls): name: group for group, names in { cls.PRIORITY: {''}, - cls.UPSTREAM: {'upstream'}, cls.CORE: { 'aes', 'cache', @@ -54,6 +57,7 @@ def commit_lookup(cls): 'outtmpl', 'plugins', 'update', + 'upstream', 'utils', }, cls.MISC: { @@ -111,22 +115,36 @@ def key(self): return ((self.details or '').lower(), self.sub_details, self.message) +def unique(items): + return sorted({item.strip().lower(): item for item in items if item}.values()) + + class Changelog: MISC_RE = re.compile(r'(?:^|\b)(?:lint(?:ing)?|misc|format(?:ting)?|fixes)(?:\b|$)', re.IGNORECASE) + ALWAYS_SHOWN = (CommitGroup.PRIORITY,) - def __init__(self, groups, repo): + def __init__(self, groups, repo, collapsible=False): self._groups = groups self._repo = repo + self._collapsible = collapsible def __str__(self): return '\n'.join(self._format_groups(self._groups)).replace('\t', ' ') def _format_groups(self, groups): + first = True for item in CommitGroup: + if self._collapsible and item not in self.ALWAYS_SHOWN and first: + first = False + yield '\n
<details><summary><h3>Changelog</h3></summary>\n' + group = groups[item] if group: yield self.format_module(item.value, group) + if self._collapsible: + yield '\n</details>
' + def format_module(self, name, group): result = f'\n#### {name} changes\n' if name else '\n' return result + '\n'.join(self._format_group(group)) @@ -137,62 +155,52 @@ def _format_group(self, group): for _, items in detail_groups: items = list(items) details = items[0].details - if not details: - indent = '' - else: - yield f'- {details}' - indent = '\t' if details == 'cleanup': - items, cleanup_misc_items = self._filter_cleanup_misc_items(items) + items = self._prepare_cleanup_misc_items(items) + + prefix = '-' + if details: + if len(items) == 1: + prefix = f'- **{details}**:' + else: + yield f'- **{details}**' + prefix = '\t-' sub_detail_groups = itertools.groupby(items, lambda item: tuple(map(str.lower, item.sub_details))) for sub_details, entries in sub_detail_groups: if not sub_details: for entry in entries: - yield f'{indent}- {self.format_single_change(entry)}' + yield f'{prefix} {self.format_single_change(entry)}' continue entries = list(entries) - prefix = f'{indent}- {", ".join(entries[0].sub_details)}' + sub_prefix = f'{prefix} {", ".join(entries[0].sub_details)}' if len(entries) == 1: - yield f'{prefix}: {self.format_single_change(entries[0])}' + yield f'{sub_prefix}: {self.format_single_change(entries[0])}' continue - yield prefix + yield sub_prefix for entry in entries: - yield f'{indent}\t- {self.format_single_change(entry)}' + yield f'\t{prefix} {self.format_single_change(entry)}' - if details == 'cleanup' and cleanup_misc_items: - yield from self._format_cleanup_misc_sub_group(cleanup_misc_items) - - def _filter_cleanup_misc_items(self, items): + def _prepare_cleanup_misc_items(self, items): cleanup_misc_items = defaultdict(list) - non_misc_items = [] + sorted_items = [] for item in items: if self.MISC_RE.search(item.message): cleanup_misc_items[tuple(item.commit.authors)].append(item) else: - non_misc_items.append(item) + sorted_items.append(item) - return non_misc_items, cleanup_misc_items + for commit_infos in cleanup_misc_items.values(): + sorted_items.append(CommitInfo( + 'cleanup', ('Miscellaneous',), ', '.join( + self._format_message_link(None, info.commit.hash) + for info in sorted(commit_infos, key=lambda item: item.commit.hash or '')), + [], Commit(None, '', commit_infos[0].commit.authors), [])) - def _format_cleanup_misc_sub_group(self, group): - prefix = '\t- Miscellaneous' - if len(group) == 1: - yield f'{prefix}: {next(self._format_cleanup_misc_items(group))}' - return - - yield prefix - for message in self._format_cleanup_misc_items(group): - yield f'\t\t- {message}' - - def _format_cleanup_misc_items(self, group): - for authors, infos in group.items(): - message = ', '.join( - self._format_message_link(None, info.commit.hash) - for info in sorted(infos, key=lambda item: item.commit.hash or '')) - yield f'{message} by {self._format_authors(authors)}' + return sorted_items def format_single_change(self, info): message = self._format_message_link(info.message, info.commit.hash) @@ -236,12 +244,8 @@ class CommitRange: AUTHOR_INDICATOR_RE = re.compile(r'Authored by:? ', re.IGNORECASE) MESSAGE_RE = re.compile(r''' - (?:\[ - (?P[^\]\/:,]+) - (?:/(?P
[^\]:,]+))? - (?:[:,](?P[^\]]+))? - \]\ )? - (?:(?P`?[^:`]+`?): )? + (?:\[(?P[^\]]+)\]\ )? + (?:(?P`?[^:`]+`?): )? (?P.+?) (?:\ \((?P\#\d+(?:,\ \#\d+)*)\))? ''', re.VERBOSE | re.DOTALL) @@ -340,60 +344,76 @@ def apply_overrides(self, overrides): self._commits = {key: value for key, value in reversed(self._commits.items())} def groups(self): - groups = defaultdict(list) + group_dict = defaultdict(list) for commit in self: - upstream_re = self.UPSTREAM_MERGE_RE.match(commit.short) + upstream_re = self.UPSTREAM_MERGE_RE.search(commit.short) if upstream_re: - commit.short = f'[upstream] Merge up to youtube-dl {upstream_re.group(1)}' + commit.short = f'[upstream] Merged with youtube-dl {upstream_re.group(1)}' match = self.MESSAGE_RE.fullmatch(commit.short) if not match: logger.error(f'Error parsing short commit message: {commit.short!r}') continue - prefix, details, sub_details, sub_details_alt, message, issues = match.groups() - group = None - if prefix: - if prefix == 'priority': - prefix, _, details = (details or '').partition('/') - logger.debug(f'Priority: {message!r}') - group = CommitGroup.PRIORITY - - if not details and prefix: - if prefix not in ('core', 'downloader', 'extractor', 'misc', 'postprocessor', 'upstream'): - logger.debug(f'Replaced details with {prefix!r}') - details = prefix or None - - if details == 'common': - details = None - - if details: - details = details.strip() - - else: - group = CommitGroup.CORE - - sub_details = f'{sub_details or ""},{sub_details_alt or ""}'.replace(':', ',') - sub_details = tuple(filter(None, map(str.strip, sub_details.split(',')))) - + prefix, sub_details_alt, message, issues = match.groups() issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else [] + if prefix: + groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(','))) + group = next(iter(filter(None, groups)), None) + details = ', '.join(unique(details)) + sub_details = list(itertools.chain.from_iterable(sub_details)) + else: + group = CommitGroup.CORE + details = None + sub_details = [] + + if sub_details_alt: + sub_details.append(sub_details_alt) + sub_details = tuple(unique(sub_details)) + if not group: - group = CommitGroup.get(prefix.lower()) - if not group: - if self.EXTRACTOR_INDICATOR_RE.search(commit.short): - group = CommitGroup.EXTRACTOR - else: - group = CommitGroup.POSTPROCESSOR - logger.warning(f'Failed to map {commit.short!r}, selected {group.name}') + if self.EXTRACTOR_INDICATOR_RE.search(commit.short): + group = CommitGroup.EXTRACTOR + else: + group = CommitGroup.POSTPROCESSOR + logger.warning(f'Failed to map {commit.short!r}, selected {group.name.lower()}') commit_info = CommitInfo( details, sub_details, message.strip(), issues, commit, self._fixes[commit.hash]) - logger.debug(f'Resolved {commit.short!r} to {commit_info!r}') - groups[group].append(commit_info) - return groups + logger.debug(f'Resolved {commit.short!r} to {commit_info!r}') + group_dict[group].append(commit_info) + + return group_dict + + @staticmethod + def details_from_prefix(prefix): + if not prefix: + return CommitGroup.CORE, None, () + + prefix, _, details = prefix.partition('/') + prefix = prefix.strip().lower() + details = details.strip() + + group = CommitGroup.get(prefix) + if group is CommitGroup.PRIORITY: + prefix, _, details = details.partition('/') + + if not details and prefix and prefix not in CommitGroup.ignorable_prefixes: + logger.debug(f'Replaced details with {prefix!r}') + details = prefix or None + + if details == 'common': + details = 
None + + if details: + details, *sub_details = details.split(':') + else: + sub_details = [] + + return group, details, sub_details def get_new_contributors(contributors_path, commits): @@ -444,6 +464,9 @@ def get_new_contributors(contributors_path, commits): parser.add_argument( '--repo', default='yt-dlp/yt-dlp', help='the github repository to use for the operations (default: %(default)s)') + parser.add_argument( + '--collapsible', action='store_true', + help='make changelog collapsible (default: %(default)s)') args = parser.parse_args() logging.basicConfig( @@ -467,4 +490,4 @@ def get_new_contributors(contributors_path, commits): write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a') logger.info(f'New contributors: {", ".join(new_contributors)}') - print(Changelog(commits.groups(), args.repo)) + print(Changelog(commits.groups(), args.repo, args.collapsible)) diff --git a/devscripts/update-version.py b/devscripts/update-version.py index d888be881..c873d10a5 100644 --- a/devscripts/update-version.py +++ b/devscripts/update-version.py @@ -51,7 +51,7 @@ def get_git_head(): if __name__ == '__main__': parser = argparse.ArgumentParser(description='Update the version.py file') parser.add_argument( - '-c', '--channel', choices=['stable', 'nightly'], default='stable', + '-c', '--channel', default='stable', help='Select update channel (default: %(default)s)') parser.add_argument( '-o', '--output', default='yt_dlp/version.py', diff --git a/setup.cfg b/setup.cfg index 6deaa7971..68d9e516d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,6 +8,7 @@ ignore = E402,E501,E731,E741,W503 max_line_length = 120 per_file_ignores = devscripts/lazy_load_template.py: F401 + yt_dlp/utils/__init__.py: F401, F403 [autoflake] diff --git a/test/helper.py b/test/helper.py index 0b90660ff..539b2f618 100644 --- a/test/helper.py +++ b/test/helper.py @@ -194,8 +194,8 @@ def sanitize_got_info_dict(got_dict): 'formats', 'thumbnails', 'subtitles', 'automatic_captions', 'comments', 'entries', # Auto-generated - 'autonumber', 'playlist', 'format_index', 'video_ext', 'audio_ext', 'duration_string', 'epoch', - 'fulltitle', 'extractor', 'extractor_key', 'filepath', 'infojson_filename', 'original_url', 'n_entries', + 'autonumber', 'playlist', 'format_index', 'video_ext', 'audio_ext', 'duration_string', 'epoch', 'n_entries', + 'fulltitle', 'extractor', 'extractor_key', 'filename', 'filepath', 'infojson_filename', 'original_url', # Only live_status needs to be checked 'is_live', 'was_live', diff --git a/test/test_YoutubeDL.py b/test/test_YoutubeDL.py index 3c26bd7c6..ee6c52713 100644 --- a/test/test_YoutubeDL.py +++ b/test/test_YoutubeDL.py @@ -10,7 +10,6 @@ import copy import json -import urllib.error from test.helper import FakeYDL, assertRegexpMatches from yt_dlp import YoutubeDL @@ -757,7 +756,7 @@ def expect_same_infodict(out): test('%(id)r %(height)r', "'1234' 1080") test('%(ext)s-%(ext|def)d', 'mp4-def') test('%(width|0)04d', '0000') - test('a%(width|)d', 'a', outtmpl_na_placeholder='none') + test('a%(width|b)d', 'ab', outtmpl_na_placeholder='none') FORMATS = self.outtmpl_info['formats'] sanitize = lambda x: x.replace(':', ':').replace('"', """).replace('\n', ' ') @@ -871,12 +870,12 @@ def test_postprocessors(self): class SimplePP(PostProcessor): def run(self, info): - with open(audiofile, 'wt') as f: + with open(audiofile, 'w') as f: f.write('EXAMPLE') return [info['filepath']], info def run_pp(params, PP): - with open(filename, 'wt') as f: + with open(filename, 'w') as f: f.write('EXAMPLE') ydl = 
YoutubeDL(params) ydl.add_post_processor(PP()) @@ -895,7 +894,7 @@ def run_pp(params, PP): class ModifierPP(PostProcessor): def run(self, info): - with open(info['filepath'], 'wt') as f: + with open(info['filepath'], 'w') as f: f.write('MODIFIED') return [], info @@ -1097,11 +1096,6 @@ def test_selection(params, expected_ids, evaluate_all=False): test_selection({'playlist_items': '-15::2'}, INDICES[1::2], True) test_selection({'playlist_items': '-15::15'}, [], True) - def test_urlopen_no_file_protocol(self): - # see https://github.com/ytdl-org/youtube-dl/issues/8227 - ydl = YDL() - self.assertRaises(urllib.error.URLError, ydl.urlopen, 'file:///etc/passwd') - def test_do_not_override_ie_key_in_url_transparent(self): ydl = YDL() diff --git a/test/test_YoutubeDLCookieJar.py b/test/test_YoutubeDLCookieJar.py index 0d4e7dc97..2c73d7d85 100644 --- a/test/test_YoutubeDLCookieJar.py +++ b/test/test_YoutubeDLCookieJar.py @@ -11,7 +11,7 @@ import re import tempfile -from yt_dlp.utils import YoutubeDLCookieJar +from yt_dlp.cookies import YoutubeDLCookieJar class TestYoutubeDLCookieJar(unittest.TestCase): @@ -47,6 +47,12 @@ def test_malformed_cookies(self): # will be ignored self.assertFalse(cookiejar._cookies) + def test_get_cookie_header(self): + cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/httponly_cookies.txt') + cookiejar.load(ignore_discard=True, ignore_expires=True) + header = cookiejar.get_cookie_header('https://www.foobar.foobar') + self.assertIn('HTTPONLY_COOKIE', header) + if __name__ == '__main__': unittest.main() diff --git a/test/test_cookies.py b/test/test_cookies.py index 4155bcbf5..5282ef621 100644 --- a/test/test_cookies.py +++ b/test/test_cookies.py @@ -49,32 +49,38 @@ def test_get_desktop_environment(self): """ based on https://chromium.googlesource.com/chromium/src/+/refs/heads/main/base/nix/xdg_util_unittest.cc """ test_cases = [ ({}, _LinuxDesktopEnvironment.OTHER), + ({'DESKTOP_SESSION': 'my_custom_de'}, _LinuxDesktopEnvironment.OTHER), + ({'XDG_CURRENT_DESKTOP': 'my_custom_de'}, _LinuxDesktopEnvironment.OTHER), ({'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME), ({'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME), - ({'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE), - ({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE), + ({'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4), + ({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3), ({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE), ({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME), - ({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE), + ({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3), + ({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4), ({'XDG_CURRENT_DESKTOP': 'X-Cinnamon'}, _LinuxDesktopEnvironment.CINNAMON), + ({'XDG_CURRENT_DESKTOP': 'Deepin'}, _LinuxDesktopEnvironment.DEEPIN), ({'XDG_CURRENT_DESKTOP': 'GNOME'}, _LinuxDesktopEnvironment.GNOME), ({'XDG_CURRENT_DESKTOP': 'GNOME:GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME), ({'XDG_CURRENT_DESKTOP': 'GNOME : GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME), ({'XDG_CURRENT_DESKTOP': 'Unity', 'DESKTOP_SESSION': 'gnome-fallback'}, _LinuxDesktopEnvironment.GNOME), - ({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '5'}, _LinuxDesktopEnvironment.KDE), - ({'XDG_CURRENT_DESKTOP': 'KDE'}, _LinuxDesktopEnvironment.KDE), + ({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '5'}, _LinuxDesktopEnvironment.KDE5), + ({'XDG_CURRENT_DESKTOP': 
'KDE', 'KDE_SESSION_VERSION': '6'}, _LinuxDesktopEnvironment.KDE6), + ({'XDG_CURRENT_DESKTOP': 'KDE'}, _LinuxDesktopEnvironment.KDE4), ({'XDG_CURRENT_DESKTOP': 'Pantheon'}, _LinuxDesktopEnvironment.PANTHEON), + ({'XDG_CURRENT_DESKTOP': 'UKUI'}, _LinuxDesktopEnvironment.UKUI), ({'XDG_CURRENT_DESKTOP': 'Unity'}, _LinuxDesktopEnvironment.UNITY), ({'XDG_CURRENT_DESKTOP': 'Unity:Unity7'}, _LinuxDesktopEnvironment.UNITY), ({'XDG_CURRENT_DESKTOP': 'Unity:Unity8'}, _LinuxDesktopEnvironment.UNITY), ] for env, expected_desktop_environment in test_cases: - self.assertEqual(_get_linux_desktop_environment(env), expected_desktop_environment) + self.assertEqual(_get_linux_desktop_environment(env, Logger()), expected_desktop_environment) def test_chrome_cookie_decryptor_linux_derive_key(self): key = LinuxChromeCookieDecryptor.derive_key(b'abc') diff --git a/test/test_http.py b/test/test_http.py index 5ca0d7a47..3941a6e77 100644 --- a/test/test_http.py +++ b/test/test_http.py @@ -7,40 +7,190 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - +import gzip +import http.cookiejar import http.server +import io +import pathlib import ssl +import tempfile import threading +import urllib.error import urllib.request +import zlib from test.helper import http_server_port from yt_dlp import YoutubeDL +from yt_dlp.dependencies import brotli +from yt_dlp.utils import sanitized_Request, urlencode_postdata + +from .helper import FakeYDL TEST_DIR = os.path.dirname(os.path.abspath(__file__)) class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler): + protocol_version = 'HTTP/1.1' + def log_message(self, format, *args): pass + def _headers(self): + payload = str(self.headers).encode('utf-8') + self.send_response(200) + self.send_header('Content-Type', 'application/json') + self.send_header('Content-Length', str(len(payload))) + self.end_headers() + self.wfile.write(payload) + + def _redirect(self): + self.send_response(int(self.path[len('/redirect_'):])) + self.send_header('Location', '/method') + self.send_header('Content-Length', '0') + self.end_headers() + + def _method(self, method, payload=None): + self.send_response(200) + self.send_header('Content-Length', str(len(payload or ''))) + self.send_header('Method', method) + self.end_headers() + if payload: + self.wfile.write(payload) + + def _status(self, status): + payload = f'{status} NOT FOUND'.encode() + self.send_response(int(status)) + self.send_header('Content-Type', 'text/html; charset=utf-8') + self.send_header('Content-Length', str(len(payload))) + self.end_headers() + self.wfile.write(payload) + + def _read_data(self): + if 'Content-Length' in self.headers: + return self.rfile.read(int(self.headers['Content-Length'])) + + def do_POST(self): + data = self._read_data() + if self.path.startswith('/redirect_'): + self._redirect() + elif self.path.startswith('/method'): + self._method('POST', data) + elif self.path.startswith('/headers'): + self._headers() + else: + self._status(404) + + def do_HEAD(self): + if self.path.startswith('/redirect_'): + self._redirect() + elif self.path.startswith('/method'): + self._method('HEAD') + else: + self._status(404) + + def do_PUT(self): + data = self._read_data() + if self.path.startswith('/redirect_'): + self._redirect() + elif self.path.startswith('/method'): + self._method('PUT', data) + else: + self._status(404) + def do_GET(self): if self.path == '/video.html': + payload = b'