mirror of https://github.com/yt-dlp/yt-dlp.git
synced 2026-04-29 20:15:53 +00:00

Compare commits: 2a7e048a60 ... ab3ff2d5dd (6 commits)

| Author | SHA1 | Date |
|---|---|---|
| | ab3ff2d5dd | |
| | 468aa6a9b4 | |
| | 6c918c5071 | |
| | 09078190b0 | |
| | 4a772e5289 | |
| | f24b9ac0c9 | |
.github/actionlint.yml (vendored): 1 changed line

@@ -1,5 +1,4 @@
config-variables:
  - KEEP_CACHE_WARM
  - PUSH_VERSION_COMMIT
  - UPDATE_TO_VERIFICATION
  - PYPI_PROJECT
84
.github/workflows/build.yml
vendored
84
.github/workflows/build.yml
vendored
@ -78,6 +78,7 @@ permissions: {}
|
||||
|
||||
jobs:
|
||||
process:
|
||||
name: Process
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
origin: ${{ steps.process_inputs.outputs.origin }}
|
||||
@ -145,7 +146,6 @@ jobs:
|
||||
'runner': 'ubuntu-24.04-arm',
|
||||
'qemu_platform': 'linux/arm/v7',
|
||||
'onefile': False,
|
||||
'cache_requirements': True,
|
||||
'update_to': 'yt-dlp/yt-dlp@2023.03.04',
|
||||
}],
|
||||
'musllinux': [{
|
||||
@ -174,7 +174,6 @@ jobs:
|
||||
exe.setdefault('qemu_platform', None)
|
||||
exe.setdefault('onefile', True)
|
||||
exe.setdefault('onedir', True)
|
||||
exe.setdefault('cache_requirements', False)
|
||||
exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
|
||||
exe.setdefault('update_to', os.environ['UPDATE_TO'])
|
||||
if not any(INPUTS.get(key) for key in EXE_MAP):
|
||||
@ -185,6 +184,7 @@ jobs:
|
||||
f.write(f'matrix={json.dumps(matrix)}')
|
||||
|
||||
unix:
|
||||
name: unix
|
||||
needs: [process]
|
||||
if: inputs.unix
|
||||
permissions:
|
||||
@ -197,12 +197,12 @@ jobs:
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
fetch-depth: 0 # Needed for changelog
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-python@v6
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
@ -231,7 +231,7 @@ jobs:
|
||||
[[ "${version}" != "${downgraded_version}" ]]
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
path: |
|
||||
@ -261,28 +261,16 @@ jobs:
|
||||
SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Cache requirements
|
||||
if: matrix.cache_requirements
|
||||
id: cache-venv
|
||||
uses: actions/cache@v5
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
venv
|
||||
key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-
|
||||
|
||||
- name: Set up QEMU
|
||||
if: matrix.qemu_platform
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
|
||||
with:
|
||||
image: tonistiigi/binfmt:qemu-v10.0.4-56@sha256:30cc9a4d03765acac9be2ed0afc23af1ad018aed2c28ea4be8c2eb9afe03fbd1
|
||||
cache-image: false
|
||||
platforms: ${{ matrix.qemu_platform }}
|
||||
|
||||
- name: Build executable
|
||||
@ -306,7 +294,7 @@ jobs:
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
|
||||
path: |
|
||||
@ -314,6 +302,7 @@ jobs:
|
||||
compression-level: 0
|
||||
|
||||
macos:
|
||||
name: macos
|
||||
needs: [process]
|
||||
if: inputs.macos
|
||||
permissions:
|
||||
@ -326,25 +315,12 @@ jobs:
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# NB: Building universal2 does not work with python from actions/setup-python
|
||||
|
||||
- name: Cache requirements
|
||||
id: cache-venv
|
||||
uses: actions/cache@v5
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
~/yt-dlp-build-venv
|
||||
key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
cache-reqs-${{ github.job }}-${{ github.ref }}-
|
||||
cache-reqs-${{ github.job }}-
|
||||
|
||||
- name: Install Requirements
|
||||
run: |
|
||||
brew install coreutils
|
||||
@ -408,7 +384,7 @@ jobs:
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
path: |
|
||||
@ -459,29 +435,15 @@ jobs:
|
||||
PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-python@v6
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ matrix.python_version }}
|
||||
architecture: ${{ matrix.arch }}
|
||||
|
||||
- name: Cache requirements
|
||||
id: cache-venv
|
||||
if: matrix.arch == 'arm64'
|
||||
uses: actions/cache@v5
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
/yt-dlp-build-venv
|
||||
key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
|
||||
${{ env.BASE_CACHE_KEY }}-
|
||||
|
||||
- name: Install Requirements
|
||||
env:
|
||||
ARCH: ${{ matrix.arch }}
|
||||
@ -489,6 +451,8 @@ jobs:
|
||||
PYI_HASH: ${{ matrix.pyi_hash }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$PSNativeCommandUseErrorActionPreference = $true
|
||||
python -m venv /yt-dlp-build-venv
|
||||
/yt-dlp-build-venv/Scripts/Activate.ps1
|
||||
python -m pip install -U pip
|
||||
@ -506,12 +470,16 @@ jobs:
|
||||
- name: Prepare
|
||||
shell: pwsh
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$PSNativeCommandUseErrorActionPreference = $true
|
||||
python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
|
||||
python devscripts/make_lazy_extractors.py
|
||||
|
||||
- name: Build
|
||||
shell: pwsh
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$PSNativeCommandUseErrorActionPreference = $true
|
||||
/yt-dlp-build-venv/Scripts/Activate.ps1
|
||||
python -m bundle.pyinstaller
|
||||
python -m bundle.pyinstaller --onedir
|
||||
@ -521,6 +489,8 @@ jobs:
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
shell: pwsh
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$PSNativeCommandUseErrorActionPreference = $true
|
||||
$name = "yt-dlp${Env:SUFFIX}"
|
||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||
$version = & "./dist/${name}.exe" --version
|
||||
@ -531,7 +501,7 @@ jobs:
|
||||
}
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: build-bin-${{ github.job }}-${{ matrix.arch }}
|
||||
path: |
|
||||
@ -540,6 +510,7 @@ jobs:
|
||||
compression-level: 0
|
||||
|
||||
meta_files:
|
||||
name: Metadata files
|
||||
needs:
|
||||
- process
|
||||
- unix
|
||||
@ -550,13 +521,14 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v7
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
path: artifact
|
||||
pattern: build-bin-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Make SHA2-SUMS files
|
||||
shell: bash
|
||||
run: |
|
||||
cd ./artifact/
|
||||
# make sure SHA sums are also printed to stdout
|
||||
@ -618,7 +590,7 @@ jobs:
|
||||
done
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: build-${{ github.job }}
|
||||
path: |
|
||||
|
||||
.github/workflows/cache-warmer.yml (vendored): 25 changed lines (file deleted)

@@ -1,25 +0,0 @@
name: Keep cache warm
on:
  workflow_dispatch:
  schedule:
    - cron: '0 22 1,6,11,16,21,27 * *'

permissions: {}

jobs:
  build:
    if: |
      vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
    permissions:
      contents: read
    uses: ./.github/workflows/build.yml
    with:
      version: '999999'
      channel: stable
      origin: ${{ github.repository }}
      unix: false
      linux: false
      linux_armv7l: true
      musllinux: false
      macos: true
      windows: true
.github/workflows/challenge-tests.yml (vendored): 16 changed lines

@@ -37,28 +37,30 @@ jobs:
    env:
      QJS_VERSION: '2025-04-26' # Earliest version with rope strings
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Deno
        uses: denoland/setup-deno@v2
        uses: denoland/setup-deno@e95548e56dfa95d4e1a28d6f422fafe75c4c26fb # v2.0.3
        with:
          deno-version: '2.0.0' # minimum supported version
      - name: Install Bun
        uses: oven-sh/setup-bun@v2
        uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
        with:
          # minimum supported version is 1.0.31 but earliest available Windows version is 1.1.0
          bun-version: ${{ (matrix.os == 'windows-latest' && '1.1.0') || '1.0.31' }}
          no-cache: true
      - name: Install Node
        uses: actions/setup-node@v6
        uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
        with:
          node-version: '20.0' # minimum supported version
      - name: Install QuickJS (Linux)
        if: matrix.os == 'ubuntu-latest'
        shell: bash
        run: |
          wget "https://bellard.org/quickjs/binary_releases/quickjs-linux-x86_64-${QJS_VERSION}.zip" -O quickjs.zip
          unzip quickjs.zip qjs
@@ -67,15 +69,19 @@ jobs:
        if: matrix.os == 'windows-latest'
        shell: pwsh
        run: |
          $ErrorActionPreference = "Stop"
          $PSNativeCommandUseErrorActionPreference = $true
          Invoke-WebRequest "https://bellard.org/quickjs/binary_releases/quickjs-win-x86_64-${Env:QJS_VERSION}.zip" -OutFile quickjs.zip
          unzip quickjs.zip
      - name: Install test requirements
        shell: bash
        run: |
          python ./devscripts/install_deps.py --print --omit-default --include-extra test > requirements.txt
          python ./devscripts/install_deps.py --print -c certifi -c requests -c urllib3 -c yt-dlp-ejs >> requirements.txt
          python -m pip install -U -r requirements.txt
      - name: Run tests
        timeout-minutes: 15
        shell: bash
        run: |
          python -m yt_dlp -v --js-runtimes node --js-runtimes bun --js-runtimes quickjs || true
          python ./devscripts/run_tests.py test/test_jsc -k download
.github/workflows/codeql.yml (vendored): 14 changed lines

@@ -11,14 +11,18 @@ on:

permissions: {}

concurrency:
  group: codeql-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  analyze:
    name: Analyze (${{ matrix.language }})
    runs-on: ubuntu-latest
    permissions:
      actions: read
      actions: read # Needed by github/codeql-action if repository is private
      contents: read
      security-events: write
      security-events: write # Needed to use github/codeql-action with Github Advanced Security

    strategy:
      fail-fast: false
@@ -27,17 +31,17 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v4
        uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          languages: ${{ matrix.language }}
          build-mode: none

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v4
        uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/core.yml (vendored): 4 changed lines

@@ -57,12 +57,12 @@ jobs:
        - os: windows-latest
          python-version: pypy-3.11
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          fetch-depth: 0
          persist-credentials: false
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install test requirements
.github/workflows/download.yml (vendored): 56 changed lines (file deleted)

@@ -1,56 +0,0 @@
name: Download Tests
on: [push, pull_request]

permissions: {}

jobs:
  quick:
    name: Quick Download Tests
    if: "contains(github.event.head_commit.message, 'ci run dl')"
    permissions:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.10'
      - name: Install test requirements
        run: python ./devscripts/install_deps.py --include-extra dev
      - name: Run tests
        continue-on-error: true
        run: python ./devscripts/run_tests.py download

  full:
    name: Full Download Tests
    if: "contains(github.event.head_commit.message, 'ci run dl all')"
    permissions:
      contents: read
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: true
      matrix:
        os: [ubuntu-latest]
        python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
        include:
          # atleast one of each CPython/PyPy tests must be in windows
          - os: windows-latest
            python-version: '3.10'
          - os: windows-latest
            python-version: pypy-3.11
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install test requirements
        run: python ./devscripts/install_deps.py --include-extra dev
      - name: Run tests
        continue-on-error: true
        run: python ./devscripts/run_tests.py download
.github/workflows/issue-lockdown.yml (vendored): 2 changed lines

@@ -10,7 +10,7 @@ jobs:
    name: Issue Lockdown
    if: vars.ISSUE_LOCKDOWN
    permissions:
      issues: write
      issues: write # Needed to lock issues
    runs-on: ubuntu-latest
    steps:
      - name: "Lock new issue"
.github/workflows/quick-test.yml (vendored): 15 changed lines

@@ -3,6 +3,10 @@ on: [push, pull_request]

permissions: {}

concurrency:
  group: quick-test-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  tests:
    name: Core Test
@@ -11,17 +15,19 @@ jobs:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Set up Python 3.10
        uses: actions/setup-python@v6
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: '3.10'
      - name: Install test requirements
        shell: bash
        run: python ./devscripts/install_deps.py --omit-default --include-extra test
      - name: Run tests
        timeout-minutes: 15
        shell: bash
        run: |
          python3 -m yt_dlp -v || true
          python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core
@@ -32,10 +38,10 @@ jobs:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - uses: actions/setup-python@v6
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: '3.10'
      - name: Install dev dependencies
@@ -47,4 +53,5 @@ jobs:
      - name: Run autopep8
        run: autopep8 --diff .
      - name: Check file mode
        shell: bash
        run: git ls-files --format="%(objectmode) %(path)" yt_dlp/ | ( ! grep -v "^100644" )
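The new "Check file mode" step fails whenever a tracked file under yt_dlp/ has a git object mode other than 100644, i.e. is marked executable. A rough Python equivalent of that check, offered only as an illustration and assuming it is run from the repository root:

import subprocess
import sys

# Illustrative restatement of the workflow's bash one-liner: list tracked
# files under yt_dlp/ with their git object modes and fail if any of them
# is not a plain, non-executable 100644 blob.
out = subprocess.run(
    ['git', 'ls-files', '--format=%(objectmode) %(path)', 'yt_dlp/'],
    capture_output=True, text=True, check=True).stdout
bad = [line for line in out.splitlines() if not line.startswith('100644 ')]
if bad:
    sys.exit('unexpected file modes:\n' + '\n'.join(bad))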
.github/workflows/release-master.yml (vendored): 16 changed lines

@@ -19,30 +19,34 @@ permissions: {}

jobs:
  release:
    name: Publish Github release
    if: vars.BUILD_MASTER
    permissions:
      contents: write
      id-token: write # mandatory for trusted publishing
      contents: write # May be needed to publish release
      id-token: write # Needed for trusted publishing
    uses: ./.github/workflows/release.yml
    with:
      prerelease: true
      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
      target: 'master'
    secrets: inherit
    secrets:
      ARCHIVE_REPO_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

  publish_pypi:
    name: Publish to PyPI
    needs: [release]
    if: vars.MASTER_PYPI_PROJECT
    permissions:
      id-token: write # mandatory for trusted publishing
      id-token: write # Needed for trusted publishing
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v7
        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
        with:
          path: dist
          name: build-pypi
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
        with:
          verbose: true
.github/workflows/release-nightly.yml (vendored): 20 changed lines

@@ -7,6 +7,7 @@ permissions: {}

jobs:
  check_nightly:
    name: Check for new commits
    if: vars.BUILD_NIGHTLY
    permissions:
      contents: read
@@ -14,12 +15,13 @@ jobs:
    outputs:
      commit: ${{ steps.check_for_new_commits.outputs.commit }}
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          fetch-depth: 0
          persist-credentials: false
      - name: Check for new commits
        id: check_for_new_commits
        shell: bash
        run: |
          relevant_files=(
            "yt_dlp/*.py"
@@ -36,31 +38,35 @@ jobs:
          echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

  release:
    name: Publish Github release
    needs: [check_nightly]
    if: ${{ needs.check_nightly.outputs.commit }}
    permissions:
      contents: write
      id-token: write # mandatory for trusted publishing
      contents: write # May be needed to publish release
      id-token: write # Needed for trusted publishing
    uses: ./.github/workflows/release.yml
    with:
      prerelease: true
      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
      target: 'nightly'
    secrets: inherit
    secrets:
      ARCHIVE_REPO_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

  publish_pypi:
    name: Publish to PyPI
    needs: [release]
    if: vars.NIGHTLY_PYPI_PROJECT
    permissions:
      id-token: write # mandatory for trusted publishing
      id-token: write # Needed for trusted publishing
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v7
        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
        with:
          path: dist
          name: build-pypi
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
        with:
          verbose: true
39
.github/workflows/release.yml
vendored
39
.github/workflows/release.yml
vendored
@ -22,6 +22,11 @@ on:
|
||||
required: false
|
||||
default: true
|
||||
type: boolean
|
||||
secrets:
|
||||
ARCHIVE_REPO_TOKEN:
|
||||
required: false
|
||||
GPG_SIGNING_KEY:
|
||||
required: false
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
source:
|
||||
@ -60,25 +65,26 @@ permissions: {}
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
name: Prepare
|
||||
permissions:
|
||||
contents: write
|
||||
contents: write # Needed to git-push the release commit
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
channel: ${{ steps.setup_variables.outputs.channel }}
|
||||
version: ${{ steps.setup_variables.outputs.version }}
|
||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||
target_repo_token: ${{ steps.setup_variables.outputs.target_repo_token }}
|
||||
target_tag: ${{ steps.setup_variables.outputs.target_tag }}
|
||||
pypi_project: ${{ steps.setup_variables.outputs.pypi_project }}
|
||||
pypi_suffix: ${{ steps.setup_variables.outputs.pypi_suffix }}
|
||||
head_sha: ${{ steps.get_target.outputs.head_sha }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: true # Needed to git-push the release commit
|
||||
|
||||
- uses: actions/setup-python@v6
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.10" # Keep this in sync with test-workflows.yml
|
||||
|
||||
@ -103,8 +109,6 @@ jobs:
|
||||
TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
|
||||
SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
|
||||
TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
|
||||
HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
|
||||
HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
|
||||
HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
|
||||
run: |
|
||||
python -m devscripts.setup_variables
|
||||
@ -149,6 +153,7 @@ jobs:
|
||||
run: git push origin "${GITHUB_EVENT_REF}"
|
||||
|
||||
build:
|
||||
name: Build
|
||||
needs: [prepare]
|
||||
permissions:
|
||||
contents: read
|
||||
@ -162,19 +167,20 @@ jobs:
|
||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||
|
||||
publish_pypi:
|
||||
name: Publish to PyPI
|
||||
needs: [prepare, build]
|
||||
if: ${{ needs.prepare.outputs.pypi_project }}
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write # mandatory for trusted publishing
|
||||
id-token: write # Needed for trusted publishing
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
fetch-depth: 0 # Needed for changelog
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v6
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
@ -210,7 +216,7 @@ jobs:
|
||||
|
||||
- name: Upload artifacts
|
||||
if: github.event_name != 'workflow_dispatch'
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: build-pypi
|
||||
path: |
|
||||
@ -219,14 +225,15 @@ jobs:
|
||||
|
||||
- name: Publish to PyPI
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
|
||||
with:
|
||||
verbose: true
|
||||
|
||||
publish:
|
||||
name: Publish Github release
|
||||
needs: [prepare, build]
|
||||
permissions:
|
||||
contents: write
|
||||
contents: write # Needed by gh to publish release to Github
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||
@ -234,16 +241,16 @@ jobs:
|
||||
VERSION: ${{ needs.prepare.outputs.version }}
|
||||
HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
- uses: actions/download-artifact@v7
|
||||
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
with:
|
||||
path: artifact
|
||||
pattern: build-*
|
||||
merge-multiple: true
|
||||
- uses: actions/setup-python@v6
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
@ -284,7 +291,7 @@ jobs:
|
||||
|
||||
- name: Publish to archive repo
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
|
||||
GH_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
|
||||
GH_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||
TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
|
||||
TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
|
||||
|
||||
4
.github/workflows/sanitize-comment.yml
vendored
4
.github/workflows/sanitize-comment.yml
vendored
@ -11,8 +11,8 @@ jobs:
|
||||
name: Sanitize comment
|
||||
if: vars.SANITIZE_COMMENT && !github.event.issue.pull_request
|
||||
permissions:
|
||||
issues: write
|
||||
issues: write # Needed by yt-dlp/sanitize-comment to edit comments
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Sanitize comment
|
||||
uses: yt-dlp/sanitize-comment@v1
|
||||
uses: yt-dlp/sanitize-comment@4536c691101b89f5373d50fe8a7980cae146346b # v1.0.0
|
||||
|
||||
.github/workflows/test-workflows.yml (vendored): 30 changed lines

@@ -1,14 +1,18 @@
name: Test and lint workflows
on:
  push:
    branches: [master]
    paths:
      - .github/*.yml
      - .github/workflows/*
      - bundle/docker/linux/*.sh
      - devscripts/setup_variables.py
      - devscripts/setup_variables_tests.py
      - devscripts/utils.py
  pull_request:
    branches: [master]
    paths:
      - .github/*.yml
      - .github/workflows/*
      - bundle/docker/linux/*.sh
      - devscripts/setup_variables.py
@@ -17,6 +21,10 @@ on:

permissions: {}

concurrency:
  group: test-workflows-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

env:
  ACTIONLINT_VERSION: "1.7.9"
  ACTIONLINT_SHA256SUM: 233b280d05e100837f4af1433c7b40a5dcb306e3aa68fb4f17f8a7f45a7df7b4
@@ -29,15 +37,16 @@ jobs:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - uses: actions/setup-python@v6
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.10" # Keep this in sync with release.yml's prepare job
      - name: Install requirements
        env:
          ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
        shell: bash
        run: |
          python -m devscripts.install_deps --omit-default --include-extra test
          sudo apt -y install shellcheck
@@ -55,3 +64,20 @@ jobs:
      - name: Test GHA devscripts
        run: |
          pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py

  zizmor:
    name: Run zizmor
    permissions:
      contents: read
      actions: read # Needed by zizmorcore/zizmor-action if repository is private
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Run zizmor
        uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0
        with:
          advanced-security: false
          persona: pedantic
          version: v1.19.0
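The lint job above pins actionlint by both version and SHA-256 checksum (ACTIONLINT_VERSION, ACTIONLINT_SHA256SUM, and a tarball name derived from them). A minimal Python sketch of that kind of download verification; only the version and digest come from the workflow, the file handling is an illustrative assumption:

import hashlib
import sys

# Pinned values taken from the workflow's env block above.
ACTIONLINT_VERSION = '1.7.9'
ACTIONLINT_SHA256SUM = '233b280d05e100837f4af1433c7b40a5dcb306e3aa68fb4f17f8a7f45a7df7b4'
ACTIONLINT_TARBALL = f'actionlint_{ACTIONLINT_VERSION}_linux_amd64.tar.gz'

def sha256sum(path):
    # Hash the file in chunks so a large download does not need to fit in memory.
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            digest.update(chunk)
    return digest.hexdigest()

if __name__ == '__main__':
    if sha256sum(ACTIONLINT_TARBALL) != ACTIONLINT_SHA256SUM:
        sys.exit(f'{ACTIONLINT_TARBALL}: checksum mismatch')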
.github/zizmor.yml (vendored, new file): 15 changed lines

@@ -0,0 +1,15 @@
rules:
  concurrency-limits:
    ignore:
      - build.yml # Can only be triggered by maintainers or cronjob
      - issue-lockdown.yml # It *should* run for *every* new issue
      - release-nightly.yml # Can only be triggered by once-daily cronjob
      - release.yml # Can only be triggered by maintainers or cronjob
      - sanitize-comment.yml # It *should* run for *every* new comment/edit
  obfuscation:
    ignore:
      - release.yml # Not actual obfuscation
  unpinned-uses:
    config:
      policies:
        "*": hash-pin
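The hash-pin policy in the new zizmor config is what the workflow changes in this comparison implement: every uses: reference is switched from a tag such as @v6 to a full commit SHA, with the tag kept as a trailing comment. A rough illustration of that rule in Python; the regex is an assumption made for this sketch, not zizmor's actual implementation:

import re

# Illustrative check, not zizmor itself: a `uses:` reference counts as
# hash-pinned when the ref after '@' is a full 40-character commit SHA.
USES_RE = re.compile(r'^\s*-?\s*uses:\s*(?P<action>[\w./-]+)@(?P<ref>[^\s#]+)', re.MULTILINE)

def unpinned_uses(workflow_text):
    return [m.group(0).strip() for m in USES_RE.finditer(workflow_text)
            if not re.fullmatch(r'[0-9a-f]{40}', m.group('ref'))]

example = '''
      - uses: actions/checkout@v6
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
'''
print(unpinned_uses(example))  # only the '@v6' line is reported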
@@ -26,7 +26,7 @@ services:
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:2025.12.19-1@sha256:b716645f9aecd0c1418283af930804bbdbd68a73d855a60101c5aab8548d737d
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
@@ -61,7 +61,7 @@ services:
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:2025.12.19-1@sha256:36cbe6638c7c605c2b44a92e35751baa537ec8902112f790139d89c7e1ccd2a4
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
@@ -97,7 +97,7 @@ services:
      platforms:
        - "linux/arm/v7"
      args:
        VERIFYIMAGE: arm32v7/debian:bullseye
        VERIFYIMAGE: arm32v7/debian:bullseye@sha256:9d544bf6ff73e36b8df1b7e415f6c8ee40ed84a0f3a26970cac8ea88b0ccf2ac
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
@@ -132,7 +132,7 @@ services:
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: alpine:3.22
        VERIFYIMAGE: alpine:3.23.2@sha256:865b95f46d98cf867a156fe4a135ad3fe50d2056aa3f25ed31662dff6da4eb62
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
@@ -168,7 +168,7 @@ services:
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: alpine:3.22
        VERIFYIMAGE: alpine:3.23.2@sha256:865b95f46d98cf867a156fe4a135ad3fe50d2056aa3f25ed31662dff6da4eb62
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
@@ -21,8 +21,6 @@ def setup_variables(environment):
        SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
        TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
        SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
        HAS_SOURCE_ARCHIVE_REPO_TOKEN,
        HAS_TARGET_ARCHIVE_REPO_TOKEN,
        HAS_ARCHIVE_REPO_TOKEN

    `INPUTS` must contain these keys:
@@ -37,8 +35,6 @@ def setup_variables(environment):
    PROCESSED = json.loads(environment['PROCESSED'])

    source_channel = None
    does_not_have_needed_token = False
    target_repo_token = None
    pypi_project = None
    pypi_suffix = None

@@ -81,28 +77,19 @@ def setup_variables(environment):
            target_repo = REPOSITORY
        if target_repo != REPOSITORY:
            target_repo = environment['TARGET_ARCHIVE_REPO']
            target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['TARGET_PYPI_PROJECT'] or None
            pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
    else:
        target_tag = source_tag or version
        if source_channel:
            target_repo = source_channel
            target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
            pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
        else:
            target_repo = REPOSITORY

    if does_not_have_needed_token:
        if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
            print(f'::error::Repository access secret {target_repo_token} not found')
            return None
        target_repo_token = 'ARCHIVE_REPO_TOKEN'
    if target_repo != REPOSITORY and not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
        return None

    if target_repo == REPOSITORY and not INPUTS['prerelease']:
        pypi_project = environment['PYPI_PROJECT'] or None
@@ -111,7 +98,6 @@ def setup_variables(environment):
        'channel': resolved_source,
        'version': version,
        'target_repo': target_repo,
        'target_repo_token': target_repo_token,
        'target_tag': target_tag,
        'pypi_project': pypi_project,
        'pypi_suffix': pypi_suffix,
@@ -147,6 +133,7 @@ if __name__ == '__main__':

    outputs = setup_variables(dict(os.environ))
    if not outputs:
        print('::error::Repository access secret ARCHIVE_REPO_TOKEN not found')
        sys.exit(1)

    print('::group::Output variables')
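The net effect of the setup_variables change is that the per-target secrets and their HAS_SOURCE/HAS_TARGET flags are gone: publishing to a repository other than the current one now only requires the single ARCHIVE_REPO_TOKEN secret, and the __main__ block reports the missing secret and exits. A standalone restatement of that guard, offered as a sketch rather than the repository's code:

import json

def missing_archive_token(target_repo, current_repo, environment):
    # Mirrors the simplified check in setup_variables(): releasing to another
    # repository without ARCHIVE_REPO_TOKEN makes the function return None,
    # which the __main__ block turns into ::error:: output and exit code 1.
    return target_repo != current_repo and not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN'])

# Example values only: a fork targeting its nightly archive repo without the secret set.
print(missing_archive_token(
    'example/yt-dlp-nightly-builds', 'example/yt-dlp',
    {'HAS_ARCHIVE_REPO_TOKEN': 'false'}))  # True, so the release would abort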
@ -9,8 +9,10 @@ import json
|
||||
from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
|
||||
from devscripts.utils import calculate_version
|
||||
|
||||
GENERATE_TEST_DATA = object()
|
||||
|
||||
def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
|
||||
|
||||
def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected, ignore_revision=False):
|
||||
inp = inputs.copy()
|
||||
inp.setdefault('linux_armv7l', True)
|
||||
inp.setdefault('prerelease', False)
|
||||
@ -33,16 +35,19 @@ def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=Non
|
||||
'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
|
||||
'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
|
||||
'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
|
||||
'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
|
||||
'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
|
||||
'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
|
||||
}
|
||||
|
||||
result = setup_variables(env)
|
||||
if not expected:
|
||||
|
||||
if expected is GENERATE_TEST_DATA:
|
||||
print(' {\n' + '\n'.join(f' {k!r}: {v!r},' for k, v in result.items()) + '\n }')
|
||||
return
|
||||
|
||||
if expected is None:
|
||||
assert result is None, f'expected error/None but got dict: {github_repository} {note}'
|
||||
return
|
||||
|
||||
exp = expected.copy()
|
||||
if ignore_revision:
|
||||
assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
|
||||
@ -77,7 +82,6 @@ def test_setup_variables():
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION,
|
||||
'target_repo': STABLE_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION,
|
||||
'pypi_project': 'yt-dlp',
|
||||
'pypi_suffix': None,
|
||||
@ -91,7 +95,6 @@ def test_setup_variables():
|
||||
'channel': 'nightly',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': 'yt-dlp',
|
||||
'pypi_suffix': 'dev',
|
||||
@ -106,7 +109,6 @@ def test_setup_variables():
|
||||
'channel': 'nightly',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': 'yt-dlp',
|
||||
'pypi_suffix': 'dev',
|
||||
@ -120,7 +122,6 @@ def test_setup_variables():
|
||||
'channel': 'master',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-master-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -135,7 +136,6 @@ def test_setup_variables():
|
||||
'channel': 'master',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': 'yt-dlp/yt-dlp-master-builds',
|
||||
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -149,7 +149,6 @@ def test_setup_variables():
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': STABLE_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -163,7 +162,6 @@ def test_setup_variables():
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': STABLE_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -175,7 +173,6 @@ def test_setup_variables():
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -186,7 +183,6 @@ def test_setup_variables():
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -201,7 +197,6 @@ def test_setup_variables():
|
||||
'channel': f'{FORK_REPOSITORY}@nightly',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'nightly',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -216,7 +211,6 @@ def test_setup_variables():
|
||||
'channel': f'{FORK_REPOSITORY}@master',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'master',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -227,7 +221,6 @@ def test_setup_variables():
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': f'{DEFAULT_VERSION[:10]}.123',
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': f'{DEFAULT_VERSION[:10]}.123',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -239,7 +232,6 @@ def test_setup_variables():
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -250,19 +242,16 @@ def test_setup_variables():
|
||||
'channel': FORK_REPOSITORY,
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
|
||||
FORK_REPOSITORY, 'fork, nightly', {
|
||||
'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'PYPI_PROJECT': 'yt-dlp-test',
|
||||
}, {
|
||||
'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
|
||||
}, {
|
||||
}, BASE_REPO_SECRETS, {
|
||||
'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'target': 'nightly',
|
||||
'prerelease': True,
|
||||
@ -270,19 +259,16 @@ def test_setup_variables():
|
||||
'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
|
||||
'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
_test(
|
||||
FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
|
||||
FORK_REPOSITORY, 'fork, master', {
|
||||
'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'MASTER_PYPI_PROJECT': 'yt-dlp-test',
|
||||
'MASTER_PYPI_SUFFIX': 'dev',
|
||||
}, {
|
||||
'MASTER_ARCHIVE_REPO_TOKEN': '1',
|
||||
}, {
|
||||
}, BASE_REPO_SECRETS, {
|
||||
'source': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'target': 'master',
|
||||
'prerelease': True,
|
||||
@ -290,7 +276,6 @@ def test_setup_variables():
|
||||
'channel': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
|
||||
'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
|
||||
'target_tag': DEFAULT_VERSION_WITH_REVISION,
|
||||
'pypi_project': 'yt-dlp-test',
|
||||
'pypi_suffix': 'dev',
|
||||
@ -302,7 +287,6 @@ def test_setup_variables():
|
||||
'channel': f'{FORK_REPOSITORY}@experimental',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
@ -317,8 +301,15 @@ def test_setup_variables():
|
||||
'channel': 'stable',
|
||||
'version': DEFAULT_VERSION_WITH_REVISION,
|
||||
'target_repo': FORK_REPOSITORY,
|
||||
'target_repo_token': None,
|
||||
'target_tag': 'experimental',
|
||||
'pypi_project': None,
|
||||
'pypi_suffix': None,
|
||||
}, ignore_revision=True)
|
||||
|
||||
_test(
|
||||
STABLE_REPOSITORY, 'official vars but no ARCHIVE_REPO_TOKEN, nightly',
|
||||
BASE_REPO_VARS, {}, {
|
||||
'source': 'nightly',
|
||||
'target': 'nightly',
|
||||
'prerelease': True,
|
||||
}, None)
|
||||
|
||||
@ -1,44 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
from test.helper import FakeYDL, is_download_test
|
||||
from yt_dlp.extractor import IqiyiIE
|
||||
|
||||
|
||||
class WarningLogger:
|
||||
def __init__(self):
|
||||
self.messages = []
|
||||
|
||||
def warning(self, msg):
|
||||
self.messages.append(msg)
|
||||
|
||||
def debug(self, msg):
|
||||
pass
|
||||
|
||||
def error(self, msg):
|
||||
pass
|
||||
|
||||
|
||||
@is_download_test
|
||||
class TestIqiyiSDKInterpreter(unittest.TestCase):
|
||||
def test_iqiyi_sdk_interpreter(self):
|
||||
"""
|
||||
Test the functionality of IqiyiSDKInterpreter by trying to log in
|
||||
|
||||
If `sign` is incorrect, /validate call throws an HTTP 556 error
|
||||
"""
|
||||
logger = WarningLogger()
|
||||
ie = IqiyiIE(FakeYDL({'logger': logger}))
|
||||
ie._perform_login('foo', 'bar')
|
||||
self.assertTrue('unable to log in:' in logger.messages[0])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -1825,10 +1825,6 @@ from .scrippsnetworks import (
    ScrippsNetworksWatchIE,
)
from .scrolller import ScrolllerIE
from .scte import (
    SCTEIE,
    SCTECourseIE,
)
from .sejmpl import SejmIE
from .sen import SenIE
from .senalcolombia import SenalColombiaLiveIE
@ -9,14 +9,12 @@ from .openload import PhantomJSwrapper
|
||||
from ..utils import (
|
||||
ExtractorError,
|
||||
clean_html,
|
||||
decode_packed_codes,
|
||||
float_or_none,
|
||||
format_field,
|
||||
get_element_by_attribute,
|
||||
get_element_by_id,
|
||||
int_or_none,
|
||||
js_to_json,
|
||||
ohdave_rsa_encrypt,
|
||||
parse_age_limit,
|
||||
parse_duration,
|
||||
parse_iso8601,
|
||||
@ -33,143 +31,12 @@ def md5_text(text):
|
||||
return hashlib.md5(text.encode()).hexdigest()
|
||||
|
||||
|
||||
class IqiyiSDK:
|
||||
def __init__(self, target, ip, timestamp):
|
||||
self.target = target
|
||||
self.ip = ip
|
||||
self.timestamp = timestamp
|
||||
|
||||
@staticmethod
|
||||
def split_sum(data):
|
||||
return str(sum(int(p, 16) for p in data))
|
||||
|
||||
@staticmethod
|
||||
def digit_sum(num):
|
||||
if isinstance(num, int):
|
||||
num = str(num)
|
||||
return str(sum(map(int, num)))
|
||||
|
||||
def even_odd(self):
|
||||
even = self.digit_sum(str(self.timestamp)[::2])
|
||||
odd = self.digit_sum(str(self.timestamp)[1::2])
|
||||
return even, odd
|
||||
|
||||
def preprocess(self, chunksize):
|
||||
self.target = md5_text(self.target)
|
||||
chunks = []
|
||||
for i in range(32 // chunksize):
|
||||
chunks.append(self.target[chunksize * i:chunksize * (i + 1)])
|
||||
if 32 % chunksize:
|
||||
chunks.append(self.target[32 - 32 % chunksize:])
|
||||
return chunks, list(map(int, self.ip.split('.')))
|
||||
|
||||
def mod(self, modulus):
|
||||
chunks, ip = self.preprocess(32)
|
||||
self.target = chunks[0] + ''.join(str(p % modulus) for p in ip)
|
||||
|
||||
def split(self, chunksize):
|
||||
modulus_map = {
|
||||
4: 256,
|
||||
5: 10,
|
||||
8: 100,
|
||||
}
|
||||
|
||||
chunks, ip = self.preprocess(chunksize)
|
||||
ret = ''
|
||||
for i in range(len(chunks)):
|
||||
ip_part = str(ip[i] % modulus_map[chunksize]) if i < 4 else ''
|
||||
if chunksize == 8:
|
||||
ret += ip_part + chunks[i]
|
||||
else:
|
||||
ret += chunks[i] + ip_part
|
||||
self.target = ret
|
||||
|
||||
def handle_input16(self):
|
||||
self.target = md5_text(self.target)
|
||||
self.target = self.split_sum(self.target[:16]) + self.target + self.split_sum(self.target[16:])
|
||||
|
||||
def handle_input8(self):
|
||||
self.target = md5_text(self.target)
|
||||
ret = ''
|
||||
for i in range(4):
|
||||
part = self.target[8 * i:8 * (i + 1)]
|
||||
ret += self.split_sum(part) + part
|
||||
self.target = ret
|
||||
|
||||
def handleSum(self):
|
||||
self.target = md5_text(self.target)
|
||||
self.target = self.split_sum(self.target) + self.target
|
||||
|
||||
def date(self, scheme):
|
||||
self.target = md5_text(self.target)
|
||||
d = time.localtime(self.timestamp)
|
||||
strings = {
|
||||
'y': str(d.tm_year),
|
||||
'm': '%02d' % d.tm_mon,
|
||||
'd': '%02d' % d.tm_mday,
|
||||
}
|
||||
self.target += ''.join(strings[c] for c in scheme)
|
||||
|
||||
def split_time_even_odd(self):
|
||||
even, odd = self.even_odd()
|
||||
self.target = odd + md5_text(self.target) + even
|
||||
|
||||
def split_time_odd_even(self):
|
||||
even, odd = self.even_odd()
|
||||
self.target = even + md5_text(self.target) + odd
|
||||
|
||||
def split_ip_time_sum(self):
|
||||
chunks, ip = self.preprocess(32)
|
||||
self.target = str(sum(ip)) + chunks[0] + self.digit_sum(self.timestamp)
|
||||
|
||||
def split_time_ip_sum(self):
|
||||
chunks, ip = self.preprocess(32)
|
||||
self.target = self.digit_sum(self.timestamp) + chunks[0] + str(sum(ip))
|
||||
|
||||
|
||||
class IqiyiSDKInterpreter:
|
||||
def __init__(self, sdk_code):
|
||||
self.sdk_code = sdk_code
|
||||
|
||||
def run(self, target, ip, timestamp):
|
||||
self.sdk_code = decode_packed_codes(self.sdk_code)
|
||||
|
||||
functions = re.findall(r'input=([a-zA-Z0-9]+)\(input', self.sdk_code)
|
||||
|
||||
sdk = IqiyiSDK(target, ip, timestamp)
|
||||
|
||||
other_functions = {
|
||||
'handleSum': sdk.handleSum,
|
||||
'handleInput8': sdk.handle_input8,
|
||||
'handleInput16': sdk.handle_input16,
|
||||
'splitTimeEvenOdd': sdk.split_time_even_odd,
|
||||
'splitTimeOddEven': sdk.split_time_odd_even,
|
||||
'splitIpTimeSum': sdk.split_ip_time_sum,
|
||||
'splitTimeIpSum': sdk.split_time_ip_sum,
|
||||
}
|
||||
for function in functions:
|
||||
if re.match(r'mod\d+', function):
|
||||
sdk.mod(int(function[3:]))
|
||||
elif re.match(r'date[ymd]{3}', function):
|
||||
sdk.date(function[4:])
|
||||
elif re.match(r'split\d+', function):
|
||||
sdk.split(int(function[5:]))
|
||||
elif function in other_functions:
|
||||
other_functions[function]()
|
||||
else:
|
||||
raise ExtractorError(f'Unknown function {function}')
|
||||
|
||||
return sdk.target
|
||||
|
||||
|
||||
class IqiyiIE(InfoExtractor):
|
||||
IE_NAME = 'iqiyi'
|
||||
IE_DESC = '爱奇艺'
|
||||
|
||||
_VALID_URL = r'https?://(?:(?:[^.]+\.)?iqiyi\.com|www\.pps\.tv)/.+\.html'
|
||||
|
||||
_NETRC_MACHINE = 'iqiyi'
|
||||
|
||||
_TESTS = [{
|
||||
'url': 'http://www.iqiyi.com/v_19rrojlavg.html',
|
||||
# MD5 checksum differs on my machine and Travis CI
|
||||
@ -234,57 +101,6 @@ class IqiyiIE(InfoExtractor):
|
||||
'18': 7, # 1080p
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _rsa_fun(data):
|
||||
# public key extracted from http://static.iqiyi.com/js/qiyiV2/20160129180840/jobs/i18n/i18nIndex.js
|
||||
N = 0xab86b6371b5318aaa1d3c9e612a9f1264f372323c8c0f19875b5fc3b3fd3afcc1e5bec527aa94bfa85bffc157e4245aebda05389a5357b75115ac94f074aefcd
|
||||
e = 65537
|
||||
|
||||
return ohdave_rsa_encrypt(data, e, N)
|
||||
|
||||
def _perform_login(self, username, password):
|
||||
|
||||
data = self._download_json(
|
||||
'http://kylin.iqiyi.com/get_token', None,
|
||||
note='Get token for logging', errnote='Unable to get token for logging')
|
||||
sdk = data['sdk']
|
||||
timestamp = int(time.time())
|
||||
target = (
|
||||
f'/apis/reglogin/login.action?lang=zh_TW&area_code=null&email={username}'
|
||||
f'&passwd={self._rsa_fun(password.encode())}&agenttype=1&from=undefined&keeplogin=0&piccode=&fromurl=&_pos=1')
|
||||
|
||||
interp = IqiyiSDKInterpreter(sdk)
|
||||
sign = interp.run(target, data['ip'], timestamp)
|
||||
|
||||
validation_params = {
|
||||
'target': target,
|
||||
'server': 'BEA3AA1908656AABCCFF76582C4C6660',
|
||||
'token': data['token'],
|
||||
'bird_src': 'f8d91d57af224da7893dd397d52d811a',
|
||||
'sign': sign,
|
||||
'bird_t': timestamp,
|
||||
}
|
||||
validation_result = self._download_json(
|
||||
'http://kylin.iqiyi.com/validate?' + urllib.parse.urlencode(validation_params), None,
|
||||
note='Validate credentials', errnote='Unable to validate credentials')
|
||||
|
||||
MSG_MAP = {
|
||||
'P00107': 'please login via the web interface and enter the CAPTCHA code',
|
||||
'P00117': 'bad username or password',
|
||||
}
|
||||
|
||||
code = validation_result['code']
|
||||
if code != 'A00000':
|
||||
msg = MSG_MAP.get(code)
|
||||
if not msg:
|
||||
msg = f'error {code}'
|
||||
if validation_result.get('msg'):
|
||||
msg += ': ' + validation_result['msg']
|
||||
self.report_warning('unable to log in: ' + msg)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def get_raw_data(self, tvid, video_id):
|
||||
tm = int(time.time() * 1000)
|
||||
|
||||
|
||||
@@ -482,7 +482,7 @@ class NebulaChannelIE(NebulaBaseIE):

class NebulaSeasonIE(NebulaBaseIE):
    IE_NAME = 'nebula:season'
    _VALID_URL = rf'{_BASE_URL_RE}/(?P<series>[\w-]+)/season/(?P<season_number>\d+)'
    _VALID_URL = rf'{_BASE_URL_RE}/(?P<series>[\w-]+)/season/(?P<season_number>[\w-]+)'
    _TESTS = [{
        'url': 'https://nebula.tv/jetlag/season/15',
        'info_dict': {
@@ -499,6 +499,14 @@ class NebulaSeasonIE(NebulaBaseIE):
            'description': 'md5:6da9040f1c2ac559579738bfb6919d1e',
        },
        'playlist_count': 8,
    }, {
        'url': 'https://nebula.tv/jetlag/season/13-5',
        'info_dict': {
            'id': 'jetlag_13-5',
            'title': 'Hide + Seek Across NYC',
            'description': 'md5:5b87bb9acc6dcdff289bb4c71a2ad59f',
        },
        'playlist_count': 3,
    }]

    def _build_url_result(self, item):
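Widening the season component of _VALID_URL from \d+ to [\w-]+ is what lets the new hyphenated test URL (season 13-5) match. A quick standalone check of the updated pattern, with _BASE_URL_RE simplified to a single domain for this sketch:

import re

# _BASE_URL_RE is simplified here; the real extractor builds it from the Nebula domains.
_BASE_URL_RE = r'https?://nebula\.tv'
_VALID_URL = rf'{_BASE_URL_RE}/(?P<series>[\w-]+)/season/(?P<season_number>[\w-]+)'

for url in ('https://nebula.tv/jetlag/season/15', 'https://nebula.tv/jetlag/season/13-5'):
    m = re.match(_VALID_URL, url)
    print(url, '->', m.group('series'), m.group('season_number'))
# The old pattern (season_number as \d+) would not have matched the second, hyphenated season.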
@ -1,137 +0,0 @@
import re

from .common import InfoExtractor
from ..utils import (
    ExtractorError,
    decode_packed_codes,
    urlencode_postdata,
)


class SCTEBaseIE(InfoExtractor):
    _LOGIN_URL = 'https://www.scte.org/SCTE/Sign_In.aspx'
    _NETRC_MACHINE = 'scte'

    def _perform_login(self, username, password):
        login_popup = self._download_webpage(
            self._LOGIN_URL, None, 'Downloading login popup')

        def is_logged(webpage):
            return any(re.search(p, webpage) for p in (
                r'class=["\']welcome\b', r'>Sign Out<'))

        # already logged in
        if is_logged(login_popup):
            return

        login_form = self._hidden_inputs(login_popup)

        login_form.update({
            'ctl01$TemplateBody$WebPartManager1$gwpciNewContactSignInCommon$ciNewContactSignInCommon$signInUserName': username,
            'ctl01$TemplateBody$WebPartManager1$gwpciNewContactSignInCommon$ciNewContactSignInCommon$signInPassword': password,
            'ctl01$TemplateBody$WebPartManager1$gwpciNewContactSignInCommon$ciNewContactSignInCommon$RememberMe': 'on',
        })

        response = self._download_webpage(
            self._LOGIN_URL, None, 'Logging in',
            data=urlencode_postdata(login_form))

        if '|pageRedirect|' not in response and not is_logged(response):
            error = self._html_search_regex(
                r'(?s)<[^>]+class=["\']AsiError["\'][^>]*>(.+?)</',
                response, 'error message', default=None)
            if error:
                raise ExtractorError(f'Unable to login: {error}', expected=True)
            raise ExtractorError('Unable to log in')


class SCTEIE(SCTEBaseIE):
    _WORKING = False
    _VALID_URL = r'https?://learning\.scte\.org/mod/scorm/view\.php?.*?\bid=(?P<id>\d+)'
    _TESTS = [{
        'url': 'https://learning.scte.org/mod/scorm/view.php?id=31484',
        'info_dict': {
            'title': 'Introduction to DOCSIS Engineering Professional',
            'id': '31484',
        },
        'playlist_count': 5,
        'skip': 'Requires account credentials',
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        webpage = self._download_webpage(url, video_id)

        title = self._search_regex(r'<h1>(.+?)</h1>', webpage, 'title')

        context_id = self._search_regex(r'context-(\d+)', webpage, video_id)
        content_base = f'https://learning.scte.org/pluginfile.php/{context_id}/mod_scorm/content/8/'
        context = decode_packed_codes(self._download_webpage(
            f'{content_base}mobile/data.js', video_id))

        data = self._parse_xml(
            self._search_regex(
                r'CreateData\(\s*"(.+?)"', context, 'data').replace(r"\'", "'"),
            video_id)

        entries = []
        for asset in data.findall('.//asset'):
            asset_url = asset.get('url')
            if not asset_url or not asset_url.endswith('.mp4'):
                continue
            asset_id = self._search_regex(
                r'video_([^_]+)_', asset_url, 'asset id', default=None)
            if not asset_id:
                continue
            entries.append({
                'id': asset_id,
                'title': title,
                'url': content_base + asset_url,
            })

        return self.playlist_result(entries, video_id, title)


class SCTECourseIE(SCTEBaseIE):
    _WORKING = False
    _VALID_URL = r'https?://learning\.scte\.org/(?:mod/sub)?course/view\.php?.*?\bid=(?P<id>\d+)'
    _TESTS = [{
        'url': 'https://learning.scte.org/mod/subcourse/view.php?id=31491',
        'only_matching': True,
    }, {
        'url': 'https://learning.scte.org/course/view.php?id=3639',
        'only_matching': True,
    }, {
        'url': 'https://learning.scte.org/course/view.php?id=3073',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        course_id = self._match_id(url)

        webpage = self._download_webpage(url, course_id)

        title = self._search_regex(
            r'<h1>(.+?)</h1>', webpage, 'title', default=None)

        entries = []
        for mobj in re.finditer(
                r'''(?x)
                    <a[^>]+
                        href=(["\'])
                        (?P<url>
                            https?://learning\.scte\.org/mod/
                            (?P<kind>scorm|subcourse)/view\.php?(?:(?!\1).)*?
                            \bid=\d+
                        )
                    ''',
                webpage):
            item_url = mobj.group('url')
            if item_url == url:
                continue
            ie = (SCTEIE.ie_key() if mobj.group('kind') == 'scorm'
                  else SCTECourseIE.ie_key())
            entries.append(self.url_result(item_url, ie=ie))

        return self.playlist_result(entries, course_id, title)
@ -2437,7 +2437,7 @@ class YoutubeIE(YoutubeBaseInfoExtractor):

        return info

    def _comment_entries(self, root_continuation_data, ytcfg, video_id, parent=None, tracker=None):
    def _comment_entries(self, root_continuation_data, ytcfg, video_id, parent=None, tracker=None, depth=1):

        get_single_config_arg = lambda c: self._configuration_arg(c, [''])[0]

@ -2469,15 +2469,15 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
                    break
            return _continuation

        def extract_thread(contents, entity_payloads):
            if not parent:
        def extract_thread(contents, entity_payloads, thread_parent, thread_depth):
            if not thread_parent:
                tracker['current_page_thread'] = 0

            if max_depth < tracker['current_depth']:
            if max_depth < thread_depth:
                return

            for content in contents:
                if not parent and tracker['total_parent_comments'] >= max_parents:
                if not thread_parent and tracker['total_parent_comments'] >= max_parents:
                    yield
                comment_thread_renderer = try_get(content, lambda x: x['commentThreadRenderer'])

@ -2487,7 +2487,7 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
                        (comment_thread_renderer, content), [['commentRenderer', ('comment', 'commentRenderer')]],
                        expected_type=dict, default={})

                    comment = self._extract_comment_old(comment_renderer, parent)
                    comment = self._extract_comment_old(comment_renderer, thread_parent)

                # new comment format
                else:
@ -2498,7 +2498,7 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
                    if not comment_keys:
                        continue
                    entities = traverse_obj(entity_payloads, lambda _, v: v['entityKey'] in comment_keys)
                    comment = self._extract_comment(entities, parent)
                    comment = self._extract_comment(entities, thread_parent)
                    if comment:
                        comment['is_pinned'] = traverse_obj(view_model, ('pinnedText', {str})) is not None

@ -2517,14 +2517,14 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
                        continue
                    self.report_warning(
                        'Detected YouTube comments looping. Stopping comment extraction '
                        f'{"for this thread" if parent else ""} as we probably cannot get any more.')
                        f'{"for this thread" if thread_parent else ""} as we probably cannot get any more.')
                    yield
                    break  # Safeguard for recursive call in subthreads code path below
                else:
                    tracker['seen_comment_ids'].add(comment['id'])
                    tracker['seen_comment_ids'].add(comment_id)

                tracker['running_total'] += 1
                tracker['total_reply_comments' if parent else 'total_parent_comments'] += 1
                tracker['total_reply_comments' if thread_parent else 'total_parent_comments'] += 1
                yield comment

                # Attempt to get the replies
@ -2536,24 +2536,20 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
                        'subThreads', lambda _, v: v['commentThreadRenderer']))
                    # Recursively extract from `commentThreadRenderer`s in `subThreads`
                    if subthreads:
                        tracker['current_depth'] += 1
                        for entry in extract_thread(subthreads, entity_payloads):
                        for entry in extract_thread(subthreads, entity_payloads, comment_id, thread_depth + 1):
                            if entry:
                                yield entry
                        tracker['current_depth'] -= 1
                        # All of the subThreads' `continuationItemRenderer`s were within the nested
                        # `commentThreadRenderer`s and are now exhausted, so avoid unnecessary recursion below
                        continue

                    tracker['current_page_thread'] += 1
                    tracker['current_depth'] += 1
                    # Recursively extract from `continuationItemRenderer`s in `subThreads`
                    comment_entries_iter = self._comment_entries(
                        comment_replies_renderer, ytcfg, video_id,
                        parent=comment_id, tracker=tracker)
                        parent=comment_id, tracker=tracker, depth=thread_depth + 1)
                    yield from itertools.islice(comment_entries_iter, min(
                        max_replies_per_thread, max(0, max_replies - tracker['total_reply_comments'])))
                    tracker['current_depth'] -= 1

        # Keeps track of counts across recursive calls
        if not tracker:
@ -2565,13 +2561,12 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
                'total_reply_comments': 0,
                'seen_comment_ids': set(),
                'pinned_comment_ids': set(),
                'current_depth': 1,
            }

        _max_comments, max_parents, max_replies, max_replies_per_thread, max_depth, *_ = (
            int_or_none(p, default=sys.maxsize) for p in self._configuration_arg('max_comments') + [''] * 5)

        if max_depth < tracker['current_depth']:
        if max_depth < depth:
            return

        continuation = self._extract_continuation(root_continuation_data)
@ -2645,7 +2640,7 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
                    break
                continue

            for entry in extract_thread(continuation_items, mutations):
            for entry in extract_thread(continuation_items, mutations, parent, depth):
                if not entry:
                    return
                yield entry
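Across these hunks the shared `tracker['current_depth']` counter is replaced by an explicit `depth`/`thread_depth` argument threaded through the recursive calls. The likely benefit, which is an inference rather than something the diff states, is that depth becomes local to each branch, so a reply thread that exits early (for example when capped by `itertools.islice`) can no longer leave a global counter out of step. A toy sketch of the pattern only, not the extractor code:

# Toy recursion with per-call depth; a shared mutable counter would need a matching
# increment/decrement around every call and drifts if a branch is abandoned early.
def walk(node, max_depth, depth=1):
    if depth > max_depth:
        return
    yield node['id'], depth
    for child in node.get('replies', []):
        yield from walk(child, max_depth, depth + 1)

tree = {'id': 'root', 'replies': [{'id': 'a', 'replies': [{'id': 'a1', 'replies': []}]}]}
print(list(walk(tree, max_depth=2)))  # [('root', 1), ('a', 2)] -- 'a1' is pruned at depth 3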
@ -4478,7 +4478,7 @@ def decode_packed_codes(code):
        symbol_table[base_n_count] = symbols[count] or base_n_count

    return re.sub(
        r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
        r'\b(\w+)\b', lambda m: symbol_table.get(m.group(0), m.group(0)),
        obfuscated_code)
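The `decode_packed_codes` hunk swaps the direct `symbol_table[...]` lookup for `dict.get` with the matched token as fallback, so a word that never received a replacement passes through unchanged instead of raising `KeyError`. A reduced illustration with made-up data, not the real P.A.C.K.E.R. unpacking:

import re

# Made-up table and input; the real table is built from the packed code's symbol list.
symbol_table = {'0': 'var', '1': 'player'}
obfuscated_code = '0 1 = setup();'

# The old lambda would raise KeyError on 'setup'; unknown words now map to themselves.
decoded = re.sub(r'\b(\w+)\b', lambda m: symbol_table.get(m.group(0), m.group(0)), obfuscated_code)
print(decoded)  # var player = setup();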