Compare commits

..

No commits in common. "2d7b278666bfbf12cf287072498dd275c946b968" and "87eaf886f5a1fed00639baf3677ac76281cd98f9" have entirely different histories.

6 changed files with 25 additions and 100 deletions

View File

@@ -86,7 +86,7 @@ jobs:
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: "3.13" # Keep this in sync with test-workflows.yml
python-version: "3.10" # Keep this in sync with test-workflows.yml
- name: Process inputs
id: process_inputs
@@ -180,7 +180,7 @@ jobs:
persist-credentials: false
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: "3.13" # Keep this in sync with devscripts/update_bundle_requirements.py
python-version: "3.10"
- name: Install Requirements
run: |
@@ -250,7 +250,7 @@ jobs:
merge-multiple: true
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: "3.13"
python-version: "3.10"
- name: Generate release notes
env:

View File

@@ -42,7 +42,7 @@ jobs:
persist-credentials: false
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: "3.13" # Keep this in sync with release.yml's prepare job
python-version: "3.10" # Keep this in sync with release.yml's prepare job
- name: Install requirements
env:
ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}

View File

@@ -22,11 +22,8 @@ from devscripts.utils import run_process
REQUIREMENTS_PATH = pathlib.Path(__file__).parent.parent / 'bundle/requirements'
INPUT_TMPL = 'requirements-{}.in'
OUTPUT_TMPL = 'requirements-{}.txt'
COOLDOWN_DATE = (dt.datetime.today() - dt.timedelta(days=5)).strftime('%Y-%m-%d')
CUSTOM_COMPILE_COMMAND = 'python -m devscripts.update_bundle_requirements'
COOLDOWN_DATE = (dt.date.today() - dt.timedelta(days=7)).isoformat()
FUTURE_DATE = (dt.date.today() + dt.timedelta(days=1)).isoformat()
COOLDOWN_EXCEPTIONS = ('protobug', 'yt-dlp-ejs')
LINUX_GNU_PYTHON_VERSION = '3.13'
LINUX_MUSL_PYTHON_VERISON = '3.14'
@@ -155,20 +152,14 @@ def write_requirements_input(filepath: pathlib.Path, *args: str) -> None:
def run_pip_compile(python_platform: str, python_version: str, requirements_input_path: pathlib.Path, *args: str) -> str:
return run_process(
'uv', 'pip', 'compile',
'--no-config',
'--quiet',
'--no-progress',
'--color=never',
'--upgrade',
f'--exclude-newer={COOLDOWN_DATE}',
*(f'--exclude-newer-package={package}={FUTURE_DATE}' for package in COOLDOWN_EXCEPTIONS),
f'--python-platform={python_platform}',
f'--python-version={python_version}',
'--generate-hashes',
'--no-strip-markers',
f'--custom-compile-command={CUSTOM_COMPILE_COMMAND}',
str(requirements_input_path),
'--format=requirements.txt',
*args)
@@ -183,7 +174,7 @@ def main():
base_requirements_path.write_text(f'pyinstaller=={pyinstaller_version}\n')
pyinstaller_builds_deps = run_pip_compile(
target.platform, target.version, base_requirements_path,
'--no-emit-package=pyinstaller').stdout
'--color=never', '--no-emit-package=pyinstaller').stdout
requirements_path = REQUIREMENTS_PATH / OUTPUT_TMPL.format(target_suffix)
requirements_path.write_text(PYINSTALLER_BUILDS_TMPL.format(
pyinstaller_builds_deps, asset_info['browser_download_url'], asset_info['digest']))

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python3
from __future__ import annotations
import collections.abc
import contextlib
import io
import json
@@ -19,9 +18,7 @@ HASHES = {{
{hash_mapping}
}}
'''
PACKAGE_NAME = 'yt-dlp-ejs'
PREFIX = f' "{PACKAGE_NAME}=='
PYPI_ARTIFACT_NAME = PACKAGE_NAME.replace('-', '_')
PREFIX = ' "yt-dlp-ejs=='
BASE_PATH = pathlib.Path(__file__).parent.parent
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
@@ -35,58 +32,6 @@ ASSETS = {
'yt.solver.core.js': True,
}
MAKEFILE_PATH = BASE_PATH / 'Makefile'
REQUIREMENTS_PATH = BASE_PATH / 'bundle/requirements'
def requirements_needs_update(
    lines: collections.abc.Iterable[str],
    package: str,
    version: str,
):
    """Report whether the requirements pin for *package* differs from *version*.

    Scans *lines* for the first line starting with ``{package}==`` and returns
    True when the pinned version does not begin with *version*.  Returns False
    when the pin already matches or when no line for *package* is found.
    """
    prefix = f'{package}=='
    pinned = next((line for line in lines if line.startswith(prefix)), None)
    if pinned is None:
        return False
    return not pinned.removeprefix(prefix).startswith(version)
def requirements_update(
    lines: collections.abc.Iterable[str],
    package: str,
    new_version: str,
    new_hashes: list[str],
):
    """Yield the lines of a hash-pinned requirements file with *package*
    re-pinned to *new_version* and its ``--hash`` entries replaced.

    Backslash-continued lines are buffered into one logical line; only the
    logical line starting with ``{package}==`` is rewritten, every other line
    is passed through unchanged.  A provenance comment is emitted immediately
    after the leading ``#`` comment header.
    """
    # True while we are still inside the leading '#' comment header
    first_comment = True
    # buffer holding the physical lines of the current logical line
    current = []
    for line in lines:
        # normalize: every buffered line ends with a newline
        if not line.endswith('\n'):
            line += '\n'
        if first_comment:
            comment_line = line.strip()
            if comment_line.startswith('#'):
                # still in the header: pass the comment through unchanged
                yield line
                continue
            first_comment = False
            # first non-comment line seen: record that this generated file
            # was modified afterwards by this script
            yield '# It was later updated using devscripts/update_ejs.py\n'
        current.append(line)
        if line.endswith('\\\n'):
            # continue logical line
            continue
        if not current[0].startswith(f'{package}=='):
            # not the target package: emit the buffered logical line as-is
            yield from current
        else:
            # replace the whole logical line: new pin plus its hash list
            # (assumes new_hashes is non-empty — IndexError otherwise)
            yield f'{package}=={new_version} \\\n'
            for digest in new_hashes[:-1]:
                yield f' --hash={digest} \\\n'
            yield f' --hash={new_hashes[-1]}\n'
        current.clear()
def request(url: str):
@@ -148,7 +93,7 @@ def main():
current_version, _, _ = line.removeprefix(PREFIX).partition('"')
if not current_version:
print(f'{PACKAGE_NAME} dependency line could not be found')
print('yt-dlp-ejs dependency line could not be found')
return
makefile_info = makefile_variables(keys_only=True)
@@ -165,36 +110,27 @@ def main():
version = info['tag_name']
if version == current_version:
print(f'{PACKAGE_NAME} is up to date! ({version})')
print(f'yt-dlp-ejs is up to date! ({version})')
return
print(f'Updating {PACKAGE_NAME} from {current_version} to {version}')
print(f'Updating yt-dlp-ejs from {current_version} to {version}')
hashes = []
requirements_hashes = []
wheel_info = {}
for asset in info['assets']:
name = asset['name']
digest = asset['digest']
# Is it the source distribution? If so, we only need its hash for the requirements files
if name == f'{PYPI_ARTIFACT_NAME}-{version}.tar.gz':
requirements_hashes.append(digest)
continue
is_wheel = name.startswith(f'{PYPI_ARTIFACT_NAME}-') and name.endswith('.whl')
is_wheel = name.startswith('yt_dlp_ejs-') and name.endswith('.whl')
if not is_wheel and name not in ASSETS:
continue
with request(asset['browser_download_url']) as resp:
data = resp.read()
# verify digest from github
digest = asset['digest']
algo, _, expected = digest.partition(':')
hexdigest = hashlib.new(algo, data).hexdigest()
assert hexdigest == expected, f'downloaded attest mismatch ({hexdigest!r} != {expected!r})'
if is_wheel:
requirements_hashes.append(digest)
wheel_info = makefile_variables(version, name, digest, data)
continue
@@ -225,12 +161,6 @@ def main():
makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
MAKEFILE_PATH.write_text(makefile)
for req in REQUIREMENTS_PATH.glob('requirements-*.txt'):
lines = req.read_text().splitlines(True)
if requirements_needs_update(lines, PACKAGE_NAME, version):
with req.open(mode='w') as f:
f.writelines(requirements_update(lines, PACKAGE_NAME, version, requirements_hashes))
if __name__ == '__main__':
main()

View File

@@ -47,8 +47,8 @@ dependencies = []
[project.optional-dependencies]
default = [
"brotli; implementation_name=='cpython' and sys_platform!='ios'",
"brotlicffi; implementation_name!='cpython' and sys_platform!='ios'",
"brotli; implementation_name=='cpython'",
"brotlicffi; implementation_name!='cpython'",
"certifi",
"mutagen",
"pycryptodomex",

View File

@@ -439,27 +439,31 @@ class BandcampWeeklyIE(BandcampIE):  # XXX: Do not subclass from concrete IE
def _real_extract(self, url):
show_id = self._match_id(url)
show_data = self._download_json(
audio_data = self._download_json(
'https://bandcamp.com/api/bcradio_api/1/get_show',
show_id, 'Downloading radio show JSON',
data=json.dumps({'id': show_id}).encode(),
headers={'Content-Type': 'application/json'})
audio_data = show_data['compiledTrack']
headers={'Content-Type': 'application/json'})['radioShowAudio']
stream_url = audio_data['streamUrl']
format_id = traverse_obj(stream_url, ({parse_qs}, 'enc', -1))
encoding, _, bitrate_str = (format_id or '').partition('-')
series_title = show_data.get('title')
release_timestamp = unified_timestamp(show_data.get('date'))
webpage = self._download_webpage(url, show_id, fatal=False)
metadata = traverse_obj(
self._extract_data_attr(webpage, show_id, 'blob', fatal=False),
('appData', 'shows', lambda _, v: str(v['showId']) == show_id, any)) or {}
series_title = audio_data.get('title') or metadata.get('title')
release_timestamp = unified_timestamp(audio_data.get('date')) or unified_timestamp(metadata.get('date'))
return {
'id': show_id,
'episode_id': show_id,
'title': join_nonempty(series_title, strftime_or_none(release_timestamp, '%Y-%m-%d'), delim=', '),
'series': series_title,
'thumbnail': format_field(show_data, 'imageId', 'https://f4.bcbits.com/img/%s_0.jpg', default=None),
'description': show_data.get('description'),
'thumbnail': format_field(metadata, 'imageId', 'https://f4.bcbits.com/img/%s_0.jpg', default=None),
'description': metadata.get('desc') or metadata.get('short_desc'),
'duration': float_or_none(audio_data.get('duration')),
'release_timestamp': release_timestamp,
'formats': [{