mirror of
https://github.com/yt-dlp/yt-dlp
synced 2025-12-17 22:55:42 +07:00
Compare commits
339 Commits
2025.04.30
...
2025.09.23
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4429fd0450 | ||
|
|
2e81e298cd | ||
|
|
7f5d9f8543 | ||
|
|
f8750504c2 | ||
|
|
8821682f15 | ||
|
|
08d7899683 | ||
|
|
98b6b0d339 | ||
|
|
bf5d18016b | ||
|
|
4bc19adc87 | ||
|
|
b2c01d0498 | ||
|
|
e123a48f11 | ||
|
|
820c6e2445 | ||
|
|
677997d84e | ||
|
|
b81e9272dc | ||
|
|
df4b4e8ccf | ||
|
|
f3829463c7 | ||
|
|
ae3923b6b2 | ||
|
|
8ab262c66b | ||
|
|
e2d37bcc8e | ||
|
|
eb4b3a5fc7 | ||
|
|
65e90aea29 | ||
|
|
17bfaa53ed | ||
|
|
8cb037c0b0 | ||
|
|
7d9e48b22a | ||
|
|
f5cb721185 | ||
|
|
83b8409366 | ||
|
|
ba80446855 | ||
|
|
22ea0688ed | ||
|
|
5c1abcdc49 | ||
|
|
3d9a88bd8e | ||
|
|
9def9a4b0e | ||
|
|
679587dac7 | ||
|
|
a1c98226a4 | ||
|
|
c8ede5f34d | ||
|
|
a183837ec8 | ||
|
|
067062bb87 | ||
|
|
8597a4331e | ||
|
|
48a214bef4 | ||
|
|
6a763a55d8 | ||
|
|
e6e6b51214 | ||
|
|
7c9b10ebc8 | ||
|
|
cd94e70040 | ||
|
|
7c27965ff6 | ||
|
|
50136eeeb3 | ||
|
|
603acdff07 | ||
|
|
d925e92b71 | ||
|
|
ed24640943 | ||
|
|
76bb46002c | ||
|
|
1e28f6bf74 | ||
|
|
0b51005b48 | ||
|
|
223baa81f6 | ||
|
|
18fe696df9 | ||
|
|
487a90c8ef | ||
|
|
8cd37b85d4 | ||
|
|
5c7ad68ff1 | ||
|
|
1ddbd033f0 | ||
|
|
fec30c56f0 | ||
|
|
d6950c27af | ||
|
|
3bd9154412 | ||
|
|
8f4a908300 | ||
|
|
f1ba9f4ddb | ||
|
|
5c8bcfdbc6 | ||
|
|
895e762a83 | ||
|
|
39b7b8ddc7 | ||
|
|
526410b4af | ||
|
|
f29acc4a6e | ||
|
|
4dbe96459d | ||
|
|
a03c37b44e | ||
|
|
fcea3edb5c | ||
|
|
415b6d9ca8 | ||
|
|
575753b9f3 | ||
|
|
c2fc4f3e7f | ||
|
|
07247d6c20 | ||
|
|
f63a7e41d1 | ||
|
|
7b8a8abb98 | ||
|
|
a97f4cb57e | ||
|
|
d154dc3dcf | ||
|
|
438d3f06b3 | ||
|
|
74b4b3b005 | ||
|
|
36e873822b | ||
|
|
d3d1ac8eb2 | ||
|
|
86d74e5cf0 | ||
|
|
6ca9165648 | ||
|
|
82a1390204 | ||
|
|
7540aa1da1 | ||
|
|
35da8df4f8 | ||
|
|
8df121ba59 | ||
|
|
471a2b60e0 | ||
|
|
df0553153e | ||
|
|
7bc53ae799 | ||
|
|
d8200ff0a4 | ||
|
|
0f6b915822 | ||
|
|
374ea049f5 | ||
|
|
6f4c1bb593 | ||
|
|
c22660aed5 | ||
|
|
404bd889d0 | ||
|
|
edf55e8184 | ||
|
|
8a8861d538 | ||
|
|
70f5669951 | ||
|
|
6ae3543d5a | ||
|
|
770119bdd1 | ||
|
|
8e3f8065af | ||
|
|
aea85d525e | ||
|
|
f2919bd28e | ||
|
|
681ed2153d | ||
|
|
bdeb3eb3f2 | ||
|
|
b7de89c910 | ||
|
|
5e4ceb35cf | ||
|
|
e98695549e | ||
|
|
bf366517ef | ||
|
|
c76ce28e06 | ||
|
|
e8d49b1c7f | ||
|
|
a6df5e8a58 | ||
|
|
e8d2807296 | ||
|
|
fe53ebe5b6 | ||
|
|
662af5bb83 | ||
|
|
8175f3738f | ||
|
|
1e0c77ddcc | ||
|
|
e651a53a2f | ||
|
|
f799a4b472 | ||
|
|
38c2bf4026 | ||
|
|
6ff135c319 | ||
|
|
cd31c319e3 | ||
|
|
6539ee1947 | ||
|
|
43dedbe639 | ||
|
|
05e553e9d1 | ||
|
|
1c6068af99 | ||
|
|
71f30921a2 | ||
|
|
121647705a | ||
|
|
70d7687487 | ||
|
|
42ca3d601e | ||
|
|
62e2a9c0d5 | ||
|
|
28b68f6875 | ||
|
|
682334e4b3 | ||
|
|
b831406a1d | ||
|
|
23c658b9cb | ||
|
|
cc5a5caac5 | ||
|
|
66aa21dc5a | ||
|
|
57186f958f | ||
|
|
daa1859be1 | ||
|
|
e8c2bf798b | ||
|
|
1fe83b0111 | ||
|
|
30302df22b | ||
|
|
3e609b2ced | ||
|
|
d399505fdf | ||
|
|
61d4cd0bc0 | ||
|
|
4385480795 | ||
|
|
485de69dbf | ||
|
|
0adeb1e54b | ||
|
|
afaf60d9fd | ||
|
|
7e3f48d64d | ||
|
|
59765ecbc0 | ||
|
|
c59ad2b066 | ||
|
|
eed94c7306 | ||
|
|
3e918d825d | ||
|
|
035b1ece8f | ||
|
|
9951fdd0d0 | ||
|
|
959ac99e98 | ||
|
|
d88b304d44 | ||
|
|
b15aa8d772 | ||
|
|
d3edc5d52a | ||
|
|
060c6a4501 | ||
|
|
6be26626f7 | ||
|
|
ef103b2d11 | ||
|
|
3e49bc8a1b | ||
|
|
2ac3eb9837 | ||
|
|
8820101aa3 | ||
|
|
a4561c7a66 | ||
|
|
32809eb2da | ||
|
|
f9dff95cb1 | ||
|
|
790c286ce3 | ||
|
|
87e3dc8c7f | ||
|
|
1a8474c3ca | ||
|
|
09982bc33e | ||
|
|
c8329fc572 | ||
|
|
1f27a9f8ba | ||
|
|
4919051e44 | ||
|
|
5f951ce929 | ||
|
|
28bf46b7da | ||
|
|
b8abd255e4 | ||
|
|
c1ac543c81 | ||
|
|
dcc4cba39e | ||
|
|
3a84be9d16 | ||
|
|
d42a6ff0c4 | ||
|
|
ade876efb3 | ||
|
|
7e0af2b1f0 | ||
|
|
d57a0b5aa7 | ||
|
|
6fb3947c0d | ||
|
|
9f54ea3898 | ||
|
|
07d1d85f63 | ||
|
|
5d693446e8 | ||
|
|
23e9389f93 | ||
|
|
6d39c420f7 | ||
|
|
85c3fa1925 | ||
|
|
b4b4486eff | ||
|
|
630f3389c3 | ||
|
|
a6db1d297a | ||
|
|
0f33950c77 | ||
|
|
b5fea53f20 | ||
|
|
5245231e4a | ||
|
|
3ae61e0f31 | ||
|
|
a5d697f62d | ||
|
|
6e5bee418b | ||
|
|
5b57b72c1a | ||
|
|
2aaf1aa71d | ||
|
|
7b4c96e089 | ||
|
|
0b359b184d | ||
|
|
805519bfaa | ||
|
|
aa9f1f4d57 | ||
|
|
fd36b8f31b | ||
|
|
99093e96fd | ||
|
|
7c49a93788 | ||
|
|
884f35d54a | ||
|
|
c23d837b65 | ||
|
|
a7113722ec | ||
|
|
0e68332bcb | ||
|
|
422cc8cb2f | ||
|
|
fca94ac5d6 | ||
|
|
b342d27f3f | ||
|
|
b6328ca050 | ||
|
|
0b41746964 | ||
|
|
c316416b97 | ||
|
|
e99c0b838a | ||
|
|
c2ff2dbaec | ||
|
|
ca5cce5b07 | ||
|
|
f3008bc5f8 | ||
|
|
30fa54280b | ||
|
|
b018784498 | ||
|
|
11b9416e10 | ||
|
|
35fc33fbc5 | ||
|
|
b16722ede8 | ||
|
|
500761e41a | ||
|
|
2ba5391cd6 | ||
|
|
e9f157669e | ||
|
|
958153a226 | ||
|
|
1b88384634 | ||
|
|
7b81634fb1 | ||
|
|
7e2504f941 | ||
|
|
4bd9a7ade7 | ||
|
|
b5bd057fe8 | ||
|
|
5e292baad6 | ||
|
|
0a6b104489 | ||
|
|
06c1a8cdff | ||
|
|
99b85ac102 | ||
|
|
eff0759705 | ||
|
|
1838a1ce5d | ||
|
|
2600849bad | ||
|
|
3bd3029160 | ||
|
|
a4ce4327c9 | ||
|
|
c57412d1f9 | ||
|
|
5b559d0072 | ||
|
|
8f94b76cbf | ||
|
|
ff6f94041a | ||
|
|
73bf102116 | ||
|
|
1722c55400 | ||
|
|
e6bd4a3da2 | ||
|
|
51887484e4 | ||
|
|
ba090caeaa | ||
|
|
339614a173 | ||
|
|
aa863ddab9 | ||
|
|
db162b76f6 | ||
|
|
e3c605a61f | ||
|
|
97ddfefeb4 | ||
|
|
a8bf0011bd | ||
|
|
13e5516271 | ||
|
|
03dba2012d | ||
|
|
5d96527be8 | ||
|
|
1fd0e88b67 | ||
|
|
231349786e | ||
|
|
f37d599a69 | ||
|
|
9e38b273b7 | ||
|
|
4e7c1ea346 | ||
|
|
e1b6062f8c | ||
|
|
c723c4e5e7 | ||
|
|
148a1eb4c5 | ||
|
|
85c8a405e3 | ||
|
|
943083edcd | ||
|
|
3fe72e9eea | ||
|
|
d30a49742c | ||
|
|
6d265388c6 | ||
|
|
a9b3700698 | ||
|
|
201812100f | ||
|
|
cc749a8a3b | ||
|
|
f7bbf5a617 | ||
|
|
b5be29fa58 | ||
|
|
6121559e02 | ||
|
|
2e5bf002da | ||
|
|
6693d66033 | ||
|
|
b094747e93 | ||
|
|
98f8eec956 | ||
|
|
0daddc780d | ||
|
|
2d7949d564 | ||
|
|
ed108b3ea4 | ||
|
|
eee90acc47 | ||
|
|
711c5d5d09 | ||
|
|
89c1b349ad | ||
|
|
0ee1102268 | ||
|
|
7794374de8 | ||
|
|
538eb30567 | ||
|
|
f8051e3a61 | ||
|
|
52f9729c9a | ||
|
|
1a8a03ea8d | ||
|
|
e0d6c08229 | ||
|
|
53ea743a9c | ||
|
|
415b4c9f95 | ||
|
|
7977b329ed | ||
|
|
e491fd4d09 | ||
|
|
32ed5f107c | ||
|
|
167d7a9f0f | ||
|
|
83fabf3524 | ||
|
|
00b1bec552 | ||
|
|
c7e575e316 | ||
|
|
31e090cb78 | ||
|
|
545c1a5b6f | ||
|
|
f569be4602 | ||
|
|
2685654a37 | ||
|
|
abf58dcd6a | ||
|
|
20f288bdc2 | ||
|
|
f475e8b529 | ||
|
|
41c0a1fb89 | ||
|
|
a7d9a5eb79 | ||
|
|
586b557b12 | ||
|
|
317f4b8006 | ||
|
|
6839276496 | ||
|
|
cbcfe6378d | ||
|
|
7dbb47f84f | ||
|
|
464c84fedf | ||
|
|
7a7b85c901 | ||
|
|
d880e06080 | ||
|
|
ded11ebc9a | ||
|
|
ea8498ed53 | ||
|
|
b26bc32579 | ||
|
|
f123cc83b3 | ||
|
|
0feec6dc13 | ||
|
|
1d0f6539c4 | ||
|
|
17cf9088d0 | ||
|
|
9064d2482d | ||
|
|
8f303afb43 | ||
|
|
5328eda882 |
2
.github/ISSUE_TEMPLATE/1_broken_site.yml
vendored
2
.github/ISSUE_TEMPLATE/1_broken_site.yml
vendored
@@ -24,6 +24,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||||
- type: input
|
- type: input
|
||||||
id: region
|
id: region
|
||||||
|
|||||||
@@ -24,6 +24,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
||||||
- type: input
|
- type: input
|
||||||
id: region
|
id: region
|
||||||
|
|||||||
@@ -22,6 +22,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||||
- type: input
|
- type: input
|
||||||
id: region
|
id: region
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE/4_bug_report.yml
vendored
2
.github/ISSUE_TEMPLATE/4_bug_report.yml
vendored
@@ -20,6 +20,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: description
|
id: description
|
||||||
attributes:
|
attributes:
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
2
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
@@ -22,6 +22,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: description
|
id: description
|
||||||
attributes:
|
attributes:
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE/6_question.yml
vendored
2
.github/ISSUE_TEMPLATE/6_question.yml
vendored
@@ -28,6 +28,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: question
|
id: question
|
||||||
attributes:
|
attributes:
|
||||||
|
|||||||
@@ -20,6 +20,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||||
- type: input
|
- type: input
|
||||||
id: region
|
id: region
|
||||||
|
|||||||
@@ -20,6 +20,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
||||||
- type: input
|
- type: input
|
||||||
id: region
|
id: region
|
||||||
|
|||||||
@@ -18,6 +18,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||||
- type: input
|
- type: input
|
||||||
id: region
|
id: region
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml
vendored
2
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml
vendored
@@ -16,6 +16,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: description
|
id: description
|
||||||
attributes:
|
attributes:
|
||||||
|
|||||||
@@ -18,6 +18,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: description
|
id: description
|
||||||
attributes:
|
attributes:
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE_tmpl/6_question.yml
vendored
2
.github/ISSUE_TEMPLATE_tmpl/6_question.yml
vendored
@@ -24,6 +24,8 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: question
|
id: question
|
||||||
attributes:
|
attributes:
|
||||||
|
|||||||
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -33,6 +33,7 @@ ### Before submitting a *pull request* make sure you have:
|
|||||||
### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
|
### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
|
||||||
- [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
|
- [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
|
||||||
- [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
|
- [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
|
||||||
|
- [ ] I have read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
|
|
||||||
### What is the purpose of your *pull request*? Check those that apply and remove the others:
|
### What is the purpose of your *pull request*? Check those that apply and remove the others:
|
||||||
- [ ] Fix or improvement to an extractor (Make sure to add/update tests)
|
- [ ] Fix or improvement to an extractor (Make sure to add/update tests)
|
||||||
|
|||||||
28
.github/actionlint.yml
vendored
Normal file
28
.github/actionlint.yml
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
self-hosted-runner:
|
||||||
|
labels:
|
||||||
|
# Workaround for the outdated runner list in actionlint v1.7.7
|
||||||
|
# Ref: https://github.com/rhysd/actionlint/issues/533
|
||||||
|
- windows-11-arm
|
||||||
|
|
||||||
|
config-variables:
|
||||||
|
- KEEP_CACHE_WARM
|
||||||
|
- PUSH_VERSION_COMMIT
|
||||||
|
- UPDATE_TO_VERIFICATION
|
||||||
|
- PYPI_PROJECT
|
||||||
|
- PYPI_SUFFIX
|
||||||
|
- NIGHTLY_PYPI_PROJECT
|
||||||
|
- NIGHTLY_PYPI_SUFFIX
|
||||||
|
- NIGHTLY_ARCHIVE_REPO
|
||||||
|
- BUILD_NIGHTLY
|
||||||
|
- MASTER_PYPI_PROJECT
|
||||||
|
- MASTER_PYPI_SUFFIX
|
||||||
|
- MASTER_ARCHIVE_REPO
|
||||||
|
- BUILD_MASTER
|
||||||
|
- ISSUE_LOCKDOWN
|
||||||
|
- SANITIZE_COMMENT
|
||||||
|
|
||||||
|
paths:
|
||||||
|
.github/workflows/build.yml:
|
||||||
|
ignore:
|
||||||
|
# SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
|
||||||
|
- '.+SC1090.+'
|
||||||
591
.github/workflows/build.yml
vendored
591
.github/workflows/build.yml
vendored
@@ -9,31 +9,27 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: stable
|
default: stable
|
||||||
type: string
|
type: string
|
||||||
|
origin:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
unix:
|
unix:
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
linux_static:
|
linux:
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
linux_arm:
|
linux_armv7l:
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
musllinux:
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
macos:
|
macos:
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
macos_legacy:
|
|
||||||
default: true
|
|
||||||
type: boolean
|
|
||||||
windows:
|
windows:
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
windows32:
|
|
||||||
default: true
|
|
||||||
type: boolean
|
|
||||||
origin:
|
|
||||||
required: false
|
|
||||||
default: ''
|
|
||||||
type: string
|
|
||||||
secrets:
|
secrets:
|
||||||
GPG_SIGNING_KEY:
|
GPG_SIGNING_KEY:
|
||||||
required: false
|
required: false
|
||||||
@@ -43,7 +39,9 @@ on:
|
|||||||
version:
|
version:
|
||||||
description: |
|
description: |
|
||||||
VERSION: yyyy.mm.dd[.rev] or rev
|
VERSION: yyyy.mm.dd[.rev] or rev
|
||||||
required: true
|
(default: auto-generated)
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
type: string
|
type: string
|
||||||
channel:
|
channel:
|
||||||
description: |
|
description: |
|
||||||
@@ -55,37 +53,26 @@ on:
|
|||||||
description: yt-dlp, yt-dlp.tar.gz
|
description: yt-dlp, yt-dlp.tar.gz
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
linux_static:
|
linux:
|
||||||
description: yt-dlp_linux
|
description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
linux_arm:
|
linux_armv7l:
|
||||||
description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
|
description: yt-dlp_linux_armv7l.zip
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
musllinux:
|
||||||
|
description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
macos:
|
macos:
|
||||||
description: yt-dlp_macos, yt-dlp_macos.zip
|
description: yt-dlp_macos, yt-dlp_macos.zip
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
macos_legacy:
|
|
||||||
description: yt-dlp_macos_legacy
|
|
||||||
default: true
|
|
||||||
type: boolean
|
|
||||||
windows:
|
windows:
|
||||||
description: yt-dlp.exe, yt-dlp_win.zip
|
description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
windows32:
|
|
||||||
description: yt-dlp_x86.exe
|
|
||||||
default: true
|
|
||||||
type: boolean
|
|
||||||
origin:
|
|
||||||
description: Origin
|
|
||||||
required: false
|
|
||||||
default: 'current repo'
|
|
||||||
type: choice
|
|
||||||
options:
|
|
||||||
- 'current repo'
|
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
@@ -94,44 +81,151 @@ jobs:
|
|||||||
process:
|
process:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
origin: ${{ steps.process_origin.outputs.origin }}
|
origin: ${{ steps.process_inputs.outputs.origin }}
|
||||||
|
timestamp: ${{ steps.process_inputs.outputs.timestamp }}
|
||||||
|
version: ${{ steps.process_inputs.outputs.version }}
|
||||||
|
linux_matrix: ${{ steps.linux_matrix.outputs.matrix }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Process origin
|
- name: Process inputs
|
||||||
id: process_origin
|
id: process_inputs
|
||||||
|
env:
|
||||||
|
INPUTS: ${{ toJSON(inputs) }}
|
||||||
|
REPOSITORY: ${{ github.repository }}
|
||||||
|
shell: python
|
||||||
run: |
|
run: |
|
||||||
echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
INPUTS = json.loads(os.environ['INPUTS'])
|
||||||
|
timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
|
||||||
|
version = INPUTS.get('version')
|
||||||
|
if version and '.' not in version:
|
||||||
|
# build.yml was dispatched with only a revision as the version input value
|
||||||
|
version_parts = [*timestamp.split('.')[:3], version]
|
||||||
|
elif not version:
|
||||||
|
# build.yml was dispatched without any version input value, so include .HHMMSS revision
|
||||||
|
version_parts = timestamp.split('.')[:4]
|
||||||
|
else:
|
||||||
|
# build.yml was called or dispatched with a complete version input value
|
||||||
|
version_parts = version.split('.')
|
||||||
|
assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
|
||||||
|
outputs = {
|
||||||
|
'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
|
||||||
|
'timestamp': timestamp,
|
||||||
|
'version': '.'.join(version_parts),
|
||||||
|
}
|
||||||
|
print(json.dumps(outputs, indent=2))
|
||||||
|
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||||
|
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
|
||||||
|
|
||||||
|
- name: Build Linux matrix
|
||||||
|
id: linux_matrix
|
||||||
|
env:
|
||||||
|
INPUTS: ${{ toJSON(inputs) }}
|
||||||
|
PYTHON_VERSION: '3.13'
|
||||||
|
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||||
|
shell: python
|
||||||
|
run: |
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
EXE_MAP = {
|
||||||
|
'linux': [{
|
||||||
|
'os': 'linux',
|
||||||
|
'arch': 'x86_64',
|
||||||
|
'runner': 'ubuntu-24.04',
|
||||||
|
}, {
|
||||||
|
'os': 'linux',
|
||||||
|
'arch': 'aarch64',
|
||||||
|
'runner': 'ubuntu-24.04-arm',
|
||||||
|
}],
|
||||||
|
'linux_armv7l': [{
|
||||||
|
'os': 'linux',
|
||||||
|
'arch': 'armv7l',
|
||||||
|
'runner': 'ubuntu-24.04-arm',
|
||||||
|
'qemu_platform': 'linux/arm/v7',
|
||||||
|
'onefile': False,
|
||||||
|
'cache_requirements': True,
|
||||||
|
'update_to': 'yt-dlp/yt-dlp@2023.03.04',
|
||||||
|
}],
|
||||||
|
'musllinux': [{
|
||||||
|
'os': 'musllinux',
|
||||||
|
'arch': 'x86_64',
|
||||||
|
'runner': 'ubuntu-24.04',
|
||||||
|
}, {
|
||||||
|
'os': 'musllinux',
|
||||||
|
'arch': 'aarch64',
|
||||||
|
'runner': 'ubuntu-24.04-arm',
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
INPUTS = json.loads(os.environ['INPUTS'])
|
||||||
|
matrix = [exe for key, group in EXE_MAP.items() for exe in group if INPUTS.get(key)]
|
||||||
|
if not matrix:
|
||||||
|
# If we send an empty matrix when no linux inputs are given, the entire workflow fails
|
||||||
|
matrix = [EXE_MAP['linux'][0]]
|
||||||
|
for exe in matrix:
|
||||||
|
exe['exe'] = '_'.join(filter(None, (
|
||||||
|
'yt-dlp',
|
||||||
|
exe['os'],
|
||||||
|
exe['arch'] != 'x86_64' and exe['arch'],
|
||||||
|
)))
|
||||||
|
exe.setdefault('qemu_platform', None)
|
||||||
|
exe.setdefault('onefile', True)
|
||||||
|
exe.setdefault('onedir', True)
|
||||||
|
exe.setdefault('cache_requirements', False)
|
||||||
|
exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
|
||||||
|
exe.setdefault('update_to', os.environ['UPDATE_TO'])
|
||||||
|
if not any(INPUTS.get(key) for key in EXE_MAP):
|
||||||
|
print('skipping linux job')
|
||||||
|
else:
|
||||||
|
print(json.dumps(matrix, indent=2))
|
||||||
|
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||||
|
f.write(f'matrix={json.dumps(matrix)}')
|
||||||
|
|
||||||
unix:
|
unix:
|
||||||
needs: process
|
needs: process
|
||||||
if: inputs.unix
|
if: inputs.unix
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
CHANNEL: ${{ inputs.channel }}
|
||||||
|
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||||
|
VERSION: ${{ needs.process.outputs.version }}
|
||||||
|
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0 # Needed for changelog
|
fetch-depth: 0 # Needed for changelog
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: "3.10"
|
||||||
|
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: |
|
run: |
|
||||||
sudo apt -y install zip pandoc man sed
|
sudo apt -y install zip pandoc man sed
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
|
||||||
python devscripts/update_changelog.py -vv
|
python devscripts/update_changelog.py -vv
|
||||||
python devscripts/make_lazy_extractors.py
|
python devscripts/make_lazy_extractors.py
|
||||||
|
|
||||||
- name: Build Unix platform-independent binary
|
- name: Build Unix platform-independent binary
|
||||||
run: |
|
run: |
|
||||||
make all tar
|
make all tar
|
||||||
|
|
||||||
- name: Verify --update-to
|
- name: Verify --update-to
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
run: |
|
run: |
|
||||||
chmod +x ./yt-dlp
|
chmod +x ./yt-dlp
|
||||||
cp ./yt-dlp ./yt-dlp_downgraded
|
cp ./yt-dlp ./yt-dlp_downgraded
|
||||||
version="$(./yt-dlp --version)"
|
version="$(./yt-dlp --version)"
|
||||||
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
|
||||||
downgraded_version="$(./yt-dlp_downgraded --version)"
|
downgraded_version="$(./yt-dlp_downgraded --version)"
|
||||||
[[ "$version" != "$downgraded_version" ]]
|
[[ "${version}" != "${downgraded_version}" ]]
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
@@ -141,99 +235,74 @@ jobs:
|
|||||||
yt-dlp.tar.gz
|
yt-dlp.tar.gz
|
||||||
compression-level: 0
|
compression-level: 0
|
||||||
|
|
||||||
linux_static:
|
linux:
|
||||||
|
name: ${{ matrix.os }} (${{ matrix.arch }})
|
||||||
|
if: inputs.linux || inputs.linux_armv7l || inputs.musllinux
|
||||||
needs: process
|
needs: process
|
||||||
if: inputs.linux_static
|
runs-on: ${{ matrix.runner }}
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: Build static executable
|
|
||||||
env:
|
|
||||||
channel: ${{ inputs.channel }}
|
|
||||||
origin: ${{ needs.process.outputs.origin }}
|
|
||||||
version: ${{ inputs.version }}
|
|
||||||
run: |
|
|
||||||
mkdir ~/build
|
|
||||||
cd bundle/docker
|
|
||||||
docker compose up --build static
|
|
||||||
sudo chown "${USER}:docker" ~/build/yt-dlp_linux
|
|
||||||
- name: Verify --update-to
|
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
|
||||||
run: |
|
|
||||||
chmod +x ~/build/yt-dlp_linux
|
|
||||||
cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
|
|
||||||
version="$(~/build/yt-dlp_linux --version)"
|
|
||||||
~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
|
||||||
downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
|
|
||||||
[[ "$version" != "$downgraded_version" ]]
|
|
||||||
- name: Upload artifacts
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: build-bin-${{ github.job }}
|
|
||||||
path: |
|
|
||||||
~/build/yt-dlp_linux
|
|
||||||
compression-level: 0
|
|
||||||
|
|
||||||
linux_arm:
|
|
||||||
needs: process
|
|
||||||
if: inputs.linux_arm
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
packages: write # for creating cache
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
architecture:
|
include: ${{ fromJSON(needs.process.outputs.linux_matrix) }}
|
||||||
- armv7
|
env:
|
||||||
- aarch64
|
CHANNEL: ${{ inputs.channel }}
|
||||||
|
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||||
|
VERSION: ${{ needs.process.outputs.version }}
|
||||||
|
EXE_NAME: ${{ matrix.exe }}
|
||||||
|
PYTHON_VERSION: ${{ matrix.python_version }}
|
||||||
|
UPDATE_TO: ${{ (vars.UPDATE_TO_VERIFICATION && matrix.update_to) || '' }}
|
||||||
|
SKIP_ONEDIR_BUILD: ${{ (!matrix.onedir && '1') || '' }}
|
||||||
|
SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
|
||||||
path: ./repo
|
|
||||||
- name: Virtualized Install, Prepare & Build
|
|
||||||
uses: yt-dlp/run-on-arch-action@v2
|
|
||||||
with:
|
|
||||||
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
|
|
||||||
env: |
|
|
||||||
GITHUB_WORKFLOW: build
|
|
||||||
githubToken: ${{ github.token }} # To cache image
|
|
||||||
arch: ${{ matrix.architecture }}
|
|
||||||
distro: ubuntu20.04 # Standalone executable should be built on minimum supported OS
|
|
||||||
dockerRunArgs: --volume "${PWD}/repo:/repo"
|
|
||||||
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
|
|
||||||
apt update
|
|
||||||
apt -y install zlib1g-dev libffi-dev python3.9 python3.9-dev python3.9-distutils python3-pip \
|
|
||||||
python3-secretstorage # Cannot build cryptography wheel in virtual armv7 environment
|
|
||||||
python3.9 -m pip install -U pip wheel 'setuptools>=71.0.2'
|
|
||||||
# XXX: Keep this in sync with pyproject.toml (it can't be accessed at this stage) and exclude secretstorage
|
|
||||||
python3.9 -m pip install -U Pyinstaller mutagen pycryptodomex brotli certifi cffi \
|
|
||||||
'requests>=2.32.2,<3' 'urllib3>=1.26.17,<3' 'websockets>=13.0'
|
|
||||||
|
|
||||||
run: |
|
- name: Cache requirements
|
||||||
cd repo
|
if: matrix.cache_requirements
|
||||||
python3.9 devscripts/install_deps.py -o --include build
|
id: cache-venv
|
||||||
python3.9 devscripts/install_deps.py --include pyinstaller # Cached versions may be out of date
|
uses: actions/cache@v4
|
||||||
python3.9 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
env:
|
||||||
python3.9 devscripts/make_lazy_extractors.py
|
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||||
python3.9 -m bundle.pyinstaller
|
with:
|
||||||
|
path: |
|
||||||
|
venv
|
||||||
|
key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||||
|
restore-keys: |
|
||||||
|
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
|
||||||
|
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-
|
||||||
|
|
||||||
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
|
- name: Set up QEMU
|
||||||
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
|
if: matrix.qemu_platform
|
||||||
chmod +x ./dist/yt-dlp_linux_${arch}
|
uses: docker/setup-qemu-action@v3
|
||||||
cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
|
with:
|
||||||
version="$(./dist/yt-dlp_linux_${arch} --version)"
|
platforms: ${{ matrix.qemu_platform }}
|
||||||
./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
|
||||||
downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
|
- name: Build executable
|
||||||
[[ "$version" != "$downgraded_version" ]]
|
env:
|
||||||
fi
|
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}
|
||||||
|
run: |
|
||||||
|
mkdir -p ./venv
|
||||||
|
mkdir -p ./dist
|
||||||
|
pushd bundle/docker
|
||||||
|
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||||
|
popd
|
||||||
|
if [[ -z "${SKIP_ONEFILE_BUILD}" ]]; then
|
||||||
|
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Verify executable in container
|
||||||
|
env:
|
||||||
|
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}_verify
|
||||||
|
run: |
|
||||||
|
cd bundle/docker
|
||||||
|
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: build-bin-linux_${{ matrix.architecture }}
|
name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
|
||||||
path: | # run-on-arch-action designates armv7l as armv7
|
path: |
|
||||||
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
|
dist/${{ matrix.exe }}*
|
||||||
compression-level: 0
|
compression-level: 0
|
||||||
|
|
||||||
macos:
|
macos:
|
||||||
@@ -241,26 +310,35 @@ jobs:
|
|||||||
if: inputs.macos
|
if: inputs.macos
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
actions: write # For cleaning up cache
|
runs-on: macos-14
|
||||||
runs-on: macos-13
|
env:
|
||||||
|
CHANNEL: ${{ inputs.channel }}
|
||||||
|
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||||
|
VERSION: ${{ needs.process.outputs.version }}
|
||||||
|
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
# NB: Building universal2 does not work with python from actions/setup-python
|
# NB: Building universal2 does not work with python from actions/setup-python
|
||||||
|
|
||||||
- name: Restore cached requirements
|
- name: Cache requirements
|
||||||
id: restore-cache
|
id: cache-venv
|
||||||
uses: actions/cache/restore@v4
|
uses: actions/cache@v4
|
||||||
env:
|
env:
|
||||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
~/yt-dlp-build-venv
|
~/yt-dlp-build-venv
|
||||||
key: cache-reqs-${{ github.job }}
|
key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||||
|
restore-keys: |
|
||||||
|
cache-reqs-${{ github.job }}-${{ github.ref }}-
|
||||||
|
cache-reqs-${{ github.job }}-
|
||||||
|
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: |
|
run: |
|
||||||
brew install coreutils
|
brew install coreutils
|
||||||
|
# We need to use system Python in order to roll our own universal2 curl_cffi wheel
|
||||||
|
brew uninstall --ignore-dependencies python3
|
||||||
python3 -m venv ~/yt-dlp-build-venv
|
python3 -m venv ~/yt-dlp-build-venv
|
||||||
source ~/yt-dlp-build-venv/bin/activate
|
source ~/yt-dlp-build-venv/bin/activate
|
||||||
python3 devscripts/install_deps.py -o --include build
|
python3 devscripts/install_deps.py -o --include build
|
||||||
@@ -299,7 +377,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
|
||||||
python3 devscripts/make_lazy_extractors.py
|
python3 devscripts/make_lazy_extractors.py
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
@@ -314,7 +392,7 @@ jobs:
|
|||||||
chmod +x ./dist/yt-dlp_macos
|
chmod +x ./dist/yt-dlp_macos
|
||||||
cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
|
cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
|
||||||
version="$(./dist/yt-dlp_macos --version)"
|
version="$(./dist/yt-dlp_macos --version)"
|
||||||
./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
|
||||||
downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
|
downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
|
||||||
[[ "$version" != "$downgraded_version" ]]
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
|
|
||||||
@@ -327,168 +405,124 @@ jobs:
|
|||||||
dist/yt-dlp_macos.zip
|
dist/yt-dlp_macos.zip
|
||||||
compression-level: 0
|
compression-level: 0
|
||||||
|
|
||||||
- name: Cleanup cache
|
|
||||||
if: steps.restore-cache.outputs.cache-hit == 'true'
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
cache_key: cache-reqs-${{ github.job }}
|
|
||||||
repository: ${{ github.repository }}
|
|
||||||
branch: ${{ github.ref }}
|
|
||||||
run: |
|
|
||||||
gh extension install actions/gh-actions-cache
|
|
||||||
gh actions-cache delete "${cache_key}" -R "${repository}" -B "${branch}" --confirm
|
|
||||||
|
|
||||||
- name: Cache requirements
|
|
||||||
uses: actions/cache/save@v4
|
|
||||||
with:
|
|
||||||
path: |
|
|
||||||
~/yt-dlp-build-venv
|
|
||||||
key: cache-reqs-${{ github.job }}
|
|
||||||
|
|
||||||
macos_legacy:
|
|
||||||
needs: process
|
|
||||||
if: inputs.macos_legacy
|
|
||||||
runs-on: macos-13
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: Install Python
|
|
||||||
# We need the official Python, because the GA ones only support newer macOS versions
|
|
||||||
env:
|
|
||||||
PYTHON_VERSION: 3.10.5
|
|
||||||
MACOSX_DEPLOYMENT_TARGET: 10.9 # Used up by the Python build tools
|
|
||||||
run: |
|
|
||||||
# Hack to get the latest patch version. Uncomment if needed
|
|
||||||
#brew install python@3.10
|
|
||||||
#export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
|
|
||||||
curl "https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg" -o "python.pkg"
|
|
||||||
sudo installer -pkg python.pkg -target /
|
|
||||||
python3 --version
|
|
||||||
- name: Install Requirements
|
|
||||||
run: |
|
|
||||||
brew install coreutils
|
|
||||||
python3 devscripts/install_deps.py --user -o --include build
|
|
||||||
python3 devscripts/install_deps.py --user --include pyinstaller
|
|
||||||
|
|
||||||
- name: Prepare
|
|
||||||
run: |
|
|
||||||
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
|
||||||
python3 devscripts/make_lazy_extractors.py
|
|
||||||
- name: Build
|
|
||||||
run: |
|
|
||||||
python3 -m bundle.pyinstaller
|
|
||||||
mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
|
|
||||||
|
|
||||||
- name: Verify --update-to
|
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
|
||||||
run: |
|
|
||||||
chmod +x ./dist/yt-dlp_macos_legacy
|
|
||||||
cp ./dist/yt-dlp_macos_legacy ./dist/yt-dlp_macos_legacy_downgraded
|
|
||||||
version="$(./dist/yt-dlp_macos_legacy --version)"
|
|
||||||
./dist/yt-dlp_macos_legacy_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
|
||||||
downgraded_version="$(./dist/yt-dlp_macos_legacy_downgraded --version)"
|
|
||||||
[[ "$version" != "$downgraded_version" ]]
|
|
||||||
|
|
||||||
- name: Upload artifacts
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: build-bin-${{ github.job }}
|
|
||||||
path: |
|
|
||||||
dist/yt-dlp_macos_legacy
|
|
||||||
compression-level: 0
|
|
||||||
|
|
||||||
windows:
|
windows:
|
||||||
|
name: windows (${{ matrix.arch }})
|
||||||
needs: process
|
needs: process
|
||||||
if: inputs.windows
|
if: inputs.windows
|
||||||
runs-on: windows-latest
|
permissions:
|
||||||
|
contents: read
|
||||||
|
runs-on: ${{ matrix.runner }}
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- arch: 'x64'
|
||||||
|
runner: windows-2025
|
||||||
|
python_version: '3.10'
|
||||||
|
platform_tag: win_amd64
|
||||||
|
pyi_version: '6.16.0'
|
||||||
|
pyi_tag: '2025.09.13.221251'
|
||||||
|
pyi_hash: b6496c7630c3afe66900cfa824e8234a8c2e2c81704bd7facd79586abc76c0e5
|
||||||
|
- arch: 'x86'
|
||||||
|
runner: windows-2025
|
||||||
|
python_version: '3.10'
|
||||||
|
platform_tag: win32
|
||||||
|
pyi_version: '6.16.0'
|
||||||
|
pyi_tag: '2025.09.13.221251'
|
||||||
|
pyi_hash: 2d881843580efdc54f3523507fc6d9c5b6051ee49c743a6d9b7003ac5758c226
|
||||||
|
- arch: 'arm64'
|
||||||
|
runner: windows-11-arm
|
||||||
|
python_version: '3.13' # arm64 only has Python >= 3.11 available
|
||||||
|
platform_tag: win_arm64
|
||||||
|
pyi_version: '6.16.0'
|
||||||
|
pyi_tag: '2025.09.13.221251'
|
||||||
|
pyi_hash: 4250c9085e34a95c898f3ee2f764914fc36ec59f0d97c28e6a75fcf21f7b144f
|
||||||
|
env:
|
||||||
|
CHANNEL: ${{ inputs.channel }}
|
||||||
|
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||||
|
VERSION: ${{ needs.process.outputs.version }}
|
||||||
|
SUFFIX: ${{ (matrix.arch != 'x64' && format('_{0}', matrix.arch)) || '' }}
|
||||||
|
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||||
|
BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
|
||||||
|
PYI_REPO: https://github.com/yt-dlp/Pyinstaller-Builds
|
||||||
|
PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: ${{ matrix.python_version }}
|
||||||
|
architecture: ${{ matrix.arch }}
|
||||||
|
|
||||||
|
- name: Cache requirements
|
||||||
|
id: cache-venv
|
||||||
|
if: matrix.arch == 'arm64'
|
||||||
|
uses: actions/cache@v4
|
||||||
|
env:
|
||||||
|
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
/yt-dlp-build-venv
|
||||||
|
key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
|
||||||
|
${{ env.BASE_CACHE_KEY }}-
|
||||||
|
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
|
env:
|
||||||
|
ARCH: ${{ matrix.arch }}
|
||||||
|
PYI_URL: ${{ env.PYI_REPO }}/releases/download/${{ matrix.pyi_tag }}/${{ env.PYI_WHEEL }}
|
||||||
|
PYI_HASH: ${{ matrix.pyi_hash }}
|
||||||
|
shell: pwsh
|
||||||
|
run: |
|
||||||
|
python -m venv /yt-dlp-build-venv
|
||||||
|
/yt-dlp-build-venv/Scripts/Activate.ps1
|
||||||
|
python -m pip install -U pip
|
||||||
|
# Install custom PyInstaller build and verify hash
|
||||||
|
mkdir /pyi-wheels
|
||||||
|
python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}"
|
||||||
|
python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}"
|
||||||
python devscripts/install_deps.py -o --include build
|
python devscripts/install_deps.py -o --include build
|
||||||
python devscripts/install_deps.py --include curl-cffi
|
if ("${Env:ARCH}" -eq "x86") {
|
||||||
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-6.11.1-py3-none-any.whl"
|
python devscripts/install_deps.py
|
||||||
|
} else {
|
||||||
|
python devscripts/install_deps.py --include curl-cffi
|
||||||
|
}
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
|
shell: pwsh
|
||||||
run: |
|
run: |
|
||||||
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
|
||||||
python devscripts/make_lazy_extractors.py
|
python devscripts/make_lazy_extractors.py
|
||||||
|
|
||||||
- name: Build
|
- name: Build
|
||||||
|
shell: pwsh
|
||||||
run: |
|
run: |
|
||||||
|
/yt-dlp-build-venv/Scripts/Activate.ps1
|
||||||
python -m bundle.pyinstaller
|
python -m bundle.pyinstaller
|
||||||
python -m bundle.pyinstaller --onedir
|
python -m bundle.pyinstaller --onedir
|
||||||
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
|
Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip
|
||||||
|
|
||||||
- name: Verify --update-to
|
- name: Verify --update-to
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
|
shell: pwsh
|
||||||
run: |
|
run: |
|
||||||
foreach ($name in @("yt-dlp")) {
|
$name = "yt-dlp${Env:SUFFIX}"
|
||||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||||
$version = & "./dist/${name}.exe" --version
|
$version = & "./dist/${name}.exe" --version
|
||||||
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
& "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
|
||||||
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
||||||
if ($version -eq $downgraded_version) {
|
if ($version -eq $downgraded_version) {
|
||||||
exit 1
|
exit 1
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: build-bin-${{ github.job }}
|
name: build-bin-${{ github.job }}-${{ matrix.arch }}
|
||||||
path: |
|
path: |
|
||||||
dist/yt-dlp.exe
|
dist/yt-dlp${{ env.SUFFIX }}.exe
|
||||||
dist/yt-dlp_win.zip
|
dist/yt-dlp_win${{ env.SUFFIX }}.zip
|
||||||
compression-level: 0
|
|
||||||
|
|
||||||
windows32:
|
|
||||||
needs: process
|
|
||||||
if: inputs.windows32
|
|
||||||
runs-on: windows-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "3.10"
|
|
||||||
architecture: "x86"
|
|
||||||
- name: Install Requirements
|
|
||||||
run: |
|
|
||||||
python devscripts/install_deps.py -o --include build
|
|
||||||
python devscripts/install_deps.py
|
|
||||||
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-6.11.1-py3-none-any.whl"
|
|
||||||
|
|
||||||
- name: Prepare
|
|
||||||
run: |
|
|
||||||
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
|
||||||
python devscripts/make_lazy_extractors.py
|
|
||||||
- name: Build
|
|
||||||
run: |
|
|
||||||
python -m bundle.pyinstaller
|
|
||||||
|
|
||||||
- name: Verify --update-to
|
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
|
||||||
run: |
|
|
||||||
foreach ($name in @("yt-dlp_x86")) {
|
|
||||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
|
||||||
$version = & "./dist/${name}.exe" --version
|
|
||||||
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
|
||||||
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
|
||||||
if ($version -eq $downgraded_version) {
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- name: Upload artifacts
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: build-bin-${{ github.job }}
|
|
||||||
path: |
|
|
||||||
dist/yt-dlp_x86.exe
|
|
||||||
compression-level: 0
|
compression-level: 0
|
||||||
|
|
||||||
meta_files:
|
meta_files:
|
||||||
@@ -496,12 +530,9 @@ jobs:
|
|||||||
needs:
|
needs:
|
||||||
- process
|
- process
|
||||||
- unix
|
- unix
|
||||||
- linux_static
|
- linux
|
||||||
- linux_arm
|
|
||||||
- macos
|
- macos
|
||||||
- macos_legacy
|
|
||||||
- windows
|
- windows
|
||||||
- windows32
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Download artifacts
|
- name: Download artifacts
|
||||||
@@ -530,34 +561,38 @@ jobs:
|
|||||||
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||||
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lock 2024.10.22 py2exe .+
|
lock 2024.10.22 py2exe .+
|
||||||
lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lock 2024.10.22 zip Python 3\.8
|
||||||
lock 2024.10.22 (?!\w+_exe).+ Python 3\.8
|
|
||||||
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lock 2025.08.11 darwin_legacy_exe .+
|
||||||
|
lock 2025.08.27 linux_armv7l_exe .+
|
||||||
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
|
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
|
||||||
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||||
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
|
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 (?!\w+_exe).+ Python 3\.8
|
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
|
||||||
|
lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 (?!\w+_exe).+ Python 3\.8
|
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
|
||||||
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
|
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 (?!\w+_exe).+ Python 3\.8
|
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
|
||||||
|
lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
- name: Sign checksum files
|
- name: Sign checksum files
|
||||||
env:
|
env:
|
||||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||||
if: env.GPG_SIGNING_KEY != ''
|
if: env.GPG_SIGNING_KEY
|
||||||
run: |
|
run: |
|
||||||
gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
|
gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
|
||||||
for signfile in ./SHA*SUMS; do
|
for signfile in ./SHA*SUMS; do
|
||||||
|
|||||||
23
.github/workflows/cache-warmer.yml
vendored
Normal file
23
.github/workflows/cache-warmer.yml
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
name: Keep cache warm
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 22 1,6,11,16,21,27 * *'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
if: |
|
||||||
|
vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
|
||||||
|
uses: ./.github/workflows/build.yml
|
||||||
|
with:
|
||||||
|
version: '999999'
|
||||||
|
channel: stable
|
||||||
|
origin: ${{ github.repository }}
|
||||||
|
unix: false
|
||||||
|
linux: false
|
||||||
|
linux_armv7l: true
|
||||||
|
musllinux: false
|
||||||
|
macos: true
|
||||||
|
windows: true
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
10
.github/workflows/core.yml
vendored
10
.github/workflows/core.yml
vendored
@@ -37,23 +37,27 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-latest]
|
os: [ubuntu-latest]
|
||||||
# CPython 3.9 is in quick-test
|
# CPython 3.9 is in quick-test
|
||||||
python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.10]
|
python-version: ['3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
|
||||||
include:
|
include:
|
||||||
# atleast one of each CPython/PyPy tests must be in windows
|
# atleast one of each CPython/PyPy tests must be in windows
|
||||||
- os: windows-latest
|
- os: windows-latest
|
||||||
python-version: '3.9'
|
python-version: '3.9'
|
||||||
- os: windows-latest
|
- os: windows-latest
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
|
- os: windows-latest
|
||||||
|
python-version: '3.11'
|
||||||
- os: windows-latest
|
- os: windows-latest
|
||||||
python-version: '3.12'
|
python-version: '3.12'
|
||||||
- os: windows-latest
|
- os: windows-latest
|
||||||
python-version: '3.13'
|
python-version: '3.13'
|
||||||
- os: windows-latest
|
- os: windows-latest
|
||||||
python-version: pypy-3.10
|
python-version: '3.14-dev'
|
||||||
|
- os: windows-latest
|
||||||
|
python-version: pypy-3.11
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
- name: Install test requirements
|
- name: Install test requirements
|
||||||
|
|||||||
8
.github/workflows/download.yml
vendored
8
.github/workflows/download.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: 3.9
|
python-version: 3.9
|
||||||
- name: Install test requirements
|
- name: Install test requirements
|
||||||
@@ -28,17 +28,17 @@ jobs:
|
|||||||
fail-fast: true
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-latest]
|
os: [ubuntu-latest]
|
||||||
python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.10]
|
python-version: ['3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
|
||||||
include:
|
include:
|
||||||
# atleast one of each CPython/PyPy tests must be in windows
|
# atleast one of each CPython/PyPy tests must be in windows
|
||||||
- os: windows-latest
|
- os: windows-latest
|
||||||
python-version: '3.9'
|
python-version: '3.9'
|
||||||
- os: windows-latest
|
- os: windows-latest
|
||||||
python-version: pypy-3.10
|
python-version: pypy-3.11
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
- name: Install test requirements
|
- name: Install test requirements
|
||||||
|
|||||||
4
.github/workflows/quick-test.yml
vendored
4
.github/workflows/quick-test.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Set up Python 3.9
|
- name: Set up Python 3.9
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: '3.9'
|
python-version: '3.9'
|
||||||
- name: Install test requirements
|
- name: Install test requirements
|
||||||
@@ -27,7 +27,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: '3.9'
|
python-version: '3.9'
|
||||||
- name: Install dev dependencies
|
- name: Install dev dependencies
|
||||||
|
|||||||
13
.github/workflows/release-master.yml
vendored
13
.github/workflows/release-master.yml
vendored
@@ -6,10 +6,12 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "yt_dlp/**.py"
|
- "yt_dlp/**.py"
|
||||||
- "!yt_dlp/version.py"
|
- "!yt_dlp/version.py"
|
||||||
- "bundle/*.py"
|
- "bundle/**"
|
||||||
- "pyproject.toml"
|
- "pyproject.toml"
|
||||||
- "Makefile"
|
- "Makefile"
|
||||||
- ".github/workflows/build.yml"
|
- ".github/workflows/build.yml"
|
||||||
|
- ".github/workflows/release.yml"
|
||||||
|
- ".github/workflows/release-master.yml"
|
||||||
concurrency:
|
concurrency:
|
||||||
group: release-master
|
group: release-master
|
||||||
permissions:
|
permissions:
|
||||||
@@ -17,21 +19,20 @@ permissions:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
release:
|
release:
|
||||||
if: vars.BUILD_MASTER != ''
|
if: vars.BUILD_MASTER
|
||||||
uses: ./.github/workflows/release.yml
|
uses: ./.github/workflows/release.yml
|
||||||
with:
|
with:
|
||||||
prerelease: true
|
prerelease: true
|
||||||
source: master
|
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
|
||||||
|
target: 'master'
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
packages: write # For package cache
|
|
||||||
actions: write # For cleaning up cache
|
|
||||||
id-token: write # mandatory for trusted publishing
|
id-token: write # mandatory for trusted publishing
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
publish_pypi:
|
publish_pypi:
|
||||||
needs: [release]
|
needs: [release]
|
||||||
if: vars.MASTER_PYPI_PROJECT != ''
|
if: vars.MASTER_PYPI_PROJECT
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
id-token: write # mandatory for trusted publishing
|
id-token: write # mandatory for trusted publishing
|
||||||
|
|||||||
13
.github/workflows/release-nightly.yml
vendored
13
.github/workflows/release-nightly.yml
vendored
@@ -7,7 +7,7 @@ permissions:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check_nightly:
|
check_nightly:
|
||||||
if: vars.BUILD_NIGHTLY != ''
|
if: vars.BUILD_NIGHTLY
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
commit: ${{ steps.check_for_new_commits.outputs.commit }}
|
commit: ${{ steps.check_for_new_commits.outputs.commit }}
|
||||||
@@ -22,9 +22,13 @@ jobs:
|
|||||||
"yt_dlp/*.py"
|
"yt_dlp/*.py"
|
||||||
':!yt_dlp/version.py'
|
':!yt_dlp/version.py'
|
||||||
"bundle/*.py"
|
"bundle/*.py"
|
||||||
|
"bundle/docker/compose.yml"
|
||||||
|
"bundle/docker/linux/*"
|
||||||
"pyproject.toml"
|
"pyproject.toml"
|
||||||
"Makefile"
|
"Makefile"
|
||||||
".github/workflows/build.yml"
|
".github/workflows/build.yml"
|
||||||
|
".github/workflows/release.yml"
|
||||||
|
".github/workflows/release-nightly.yml"
|
||||||
)
|
)
|
||||||
echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"
|
echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
@@ -34,17 +38,16 @@ jobs:
|
|||||||
uses: ./.github/workflows/release.yml
|
uses: ./.github/workflows/release.yml
|
||||||
with:
|
with:
|
||||||
prerelease: true
|
prerelease: true
|
||||||
source: nightly
|
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
|
||||||
|
target: 'nightly'
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
packages: write # For package cache
|
|
||||||
actions: write # For cleaning up cache
|
|
||||||
id-token: write # mandatory for trusted publishing
|
id-token: write # mandatory for trusted publishing
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
publish_pypi:
|
publish_pypi:
|
||||||
needs: [release]
|
needs: [release]
|
||||||
if: vars.NIGHTLY_PYPI_PROJECT != ''
|
if: vars.NIGHTLY_PYPI_PROJECT
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
id-token: write # mandatory for trusted publishing
|
id-token: write # mandatory for trusted publishing
|
||||||
|
|||||||
279
.github/workflows/release.yml
vendored
279
.github/workflows/release.yml
vendored
@@ -14,6 +14,10 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: ''
|
default: ''
|
||||||
type: string
|
type: string
|
||||||
|
linux_armv7l:
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
prerelease:
|
prerelease:
|
||||||
required: false
|
required: false
|
||||||
default: true
|
default: true
|
||||||
@@ -43,6 +47,10 @@ on:
|
|||||||
required: false
|
required: false
|
||||||
default: ''
|
default: ''
|
||||||
type: string
|
type: string
|
||||||
|
linux_armv7l:
|
||||||
|
description: Include linux_armv7l
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
prerelease:
|
prerelease:
|
||||||
description: Pre-release
|
description: Pre-release
|
||||||
default: false
|
default: false
|
||||||
@@ -71,141 +79,63 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: "3.10" # Keep this in sync with test-workflows.yml
|
||||||
|
|
||||||
- name: Process inputs
|
- name: Process inputs
|
||||||
id: process_inputs
|
id: process_inputs
|
||||||
|
env:
|
||||||
|
INPUTS: ${{ toJSON(inputs) }}
|
||||||
run: |
|
run: |
|
||||||
cat << EOF
|
python -m devscripts.setup_variables process_inputs
|
||||||
::group::Inputs
|
|
||||||
prerelease=${{ inputs.prerelease }}
|
|
||||||
source=${{ inputs.source }}
|
|
||||||
target=${{ inputs.target }}
|
|
||||||
version=${{ inputs.version }}
|
|
||||||
::endgroup::
|
|
||||||
EOF
|
|
||||||
IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
|
|
||||||
IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
|
|
||||||
cat << EOF >> "$GITHUB_OUTPUT"
|
|
||||||
source_repo=${source_repo}
|
|
||||||
source_tag=${source_tag}
|
|
||||||
target_repo=${target_repo}
|
|
||||||
target_tag=${target_tag}
|
|
||||||
EOF
|
|
||||||
|
|
||||||
- name: Setup variables
|
- name: Setup variables
|
||||||
id: setup_variables
|
id: setup_variables
|
||||||
env:
|
env:
|
||||||
source_repo: ${{ steps.process_inputs.outputs.source_repo }}
|
INPUTS: ${{ toJSON(inputs) }}
|
||||||
source_tag: ${{ steps.process_inputs.outputs.source_tag }}
|
PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
|
||||||
target_repo: ${{ steps.process_inputs.outputs.target_repo }}
|
REPOSITORY: ${{ github.repository }}
|
||||||
target_tag: ${{ steps.process_inputs.outputs.target_tag }}
|
PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
|
||||||
|
PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
|
||||||
|
SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
|
||||||
|
SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
|
||||||
|
TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
|
||||||
|
TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
|
||||||
|
SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
|
||||||
|
TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
|
||||||
|
HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
|
||||||
|
HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
|
||||||
|
HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
# unholy bash monstrosity (sincere apologies)
|
python -m devscripts.setup_variables
|
||||||
fallback_token () {
|
|
||||||
if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
|
|
||||||
echo "::error::Repository access secret ${target_repo_token^^} not found"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
target_repo_token=ARCHIVE_REPO_TOKEN
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
source_is_channel=0
|
- name: Update version & documentation
|
||||||
[[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
|
|
||||||
if [[ -z "${source_repo}" ]]; then
|
|
||||||
source_repo='${{ github.repository }}'
|
|
||||||
elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
|
|
||||||
source_is_channel=1
|
|
||||||
source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
|
|
||||||
elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
|
|
||||||
source_tag="${source_repo}"
|
|
||||||
source_repo='${{ github.repository }}'
|
|
||||||
fi
|
|
||||||
resolved_source="${source_repo}"
|
|
||||||
if [[ "${source_tag}" ]]; then
|
|
||||||
resolved_source="${resolved_source}@${source_tag}"
|
|
||||||
elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
|
|
||||||
resolved_source='stable'
|
|
||||||
fi
|
|
||||||
|
|
||||||
revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
|
|
||||||
version="$(
|
|
||||||
python devscripts/update-version.py \
|
|
||||||
-c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
|
|
||||||
grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
|
|
||||||
|
|
||||||
if [[ "${target_repo}" ]]; then
|
|
||||||
if [[ -z "${target_tag}" ]]; then
|
|
||||||
if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
|
|
||||||
target_tag="${source_tag:-${version}}"
|
|
||||||
else
|
|
||||||
target_tag="${target_repo}"
|
|
||||||
target_repo='${{ github.repository }}'
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
|
|
||||||
target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
|
|
||||||
target_repo_token='${{ env.target_repo }}_archive_repo_token'
|
|
||||||
${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
|
|
||||||
pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
|
|
||||||
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
target_tag="${source_tag:-${version}}"
|
|
||||||
if ((source_is_channel)); then
|
|
||||||
target_repo="${source_channel}"
|
|
||||||
target_repo_token='${{ env.source_repo }}_archive_repo_token'
|
|
||||||
${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
|
|
||||||
pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
|
|
||||||
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
|
|
||||||
else
|
|
||||||
target_repo='${{ github.repository }}'
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
|
|
||||||
pypi_project='${{ vars.PYPI_PROJECT }}'
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::group::Output variables"
|
|
||||||
cat << EOF | tee -a "$GITHUB_OUTPUT"
|
|
||||||
channel=${resolved_source}
|
|
||||||
version=${version}
|
|
||||||
target_repo=${target_repo}
|
|
||||||
target_repo_token=${target_repo_token}
|
|
||||||
target_tag=${target_tag}
|
|
||||||
pypi_project=${pypi_project}
|
|
||||||
pypi_suffix=${pypi_suffix}
|
|
||||||
EOF
|
|
||||||
echo "::endgroup::"
|
|
||||||
|
|
||||||
- name: Update documentation
|
|
||||||
env:
|
env:
|
||||||
version: ${{ steps.setup_variables.outputs.version }}
|
CHANNEL: ${{ steps.setup_variables.outputs.channel }}
|
||||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
# Use base repo since this could be committed; build jobs will call this again with true origin
|
||||||
if: |
|
REPOSITORY: ${{ github.repository }}
|
||||||
!inputs.prerelease && env.target_repo == github.repository
|
VERSION: ${{ steps.setup_variables.outputs.version }}
|
||||||
run: |
|
run: |
|
||||||
|
python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
|
||||||
python devscripts/update_changelog.py -vv
|
python devscripts/update_changelog.py -vv
|
||||||
make doc
|
make doc
|
||||||
|
|
||||||
- name: Push to release
|
- name: Push to release
|
||||||
id: push_release
|
id: push_release
|
||||||
env:
|
env:
|
||||||
version: ${{ steps.setup_variables.outputs.version }}
|
VERSION: ${{ steps.setup_variables.outputs.version }}
|
||||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
|
||||||
|
GITHUB_EVENT_REF: ${{ github.event.ref }}
|
||||||
if: |
|
if: |
|
||||||
!inputs.prerelease && env.target_repo == github.repository
|
!inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
|
||||||
run: |
|
run: |
|
||||||
git config --global user.name "github-actions[bot]"
|
git config --global user.name "github-actions[bot]"
|
||||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||||
git add -u
|
git add -u
|
||||||
git commit -m "Release ${{ env.version }}" \
|
git commit -m "Release ${VERSION}" \
|
||||||
-m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
|
-m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
|
||||||
git push origin --force ${{ github.event.ref }}:release
|
git push origin --force "${GITHUB_EVENT_REF}:release"
|
||||||
|
|
||||||
- name: Get target commitish
|
- name: Get target commitish
|
||||||
id: get_target
|
id: get_target
|
||||||
@@ -214,10 +144,10 @@ jobs:
|
|||||||
|
|
||||||
- name: Update master
|
- name: Update master
|
||||||
env:
|
env:
|
||||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
GITHUB_EVENT_REF: ${{ github.event.ref }}
|
||||||
if: |
|
if: |
|
||||||
vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
|
vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
|
||||||
run: git push origin ${{ github.event.ref }}
|
run: git push origin "${GITHUB_EVENT_REF}"
|
||||||
|
|
||||||
build:
|
build:
|
||||||
needs: prepare
|
needs: prepare
|
||||||
@@ -226,10 +156,9 @@ jobs:
|
|||||||
version: ${{ needs.prepare.outputs.version }}
|
version: ${{ needs.prepare.outputs.version }}
|
||||||
channel: ${{ needs.prepare.outputs.channel }}
|
channel: ${{ needs.prepare.outputs.channel }}
|
||||||
origin: ${{ needs.prepare.outputs.target_repo }}
|
origin: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
linux_armv7l: ${{ inputs.linux_armv7l }}
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write # For package cache
|
|
||||||
actions: write # For cleaning up cache
|
|
||||||
secrets:
|
secrets:
|
||||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
|
||||||
|
|
||||||
@@ -244,7 +173,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: "3.10"
|
||||||
|
|
||||||
@@ -255,16 +184,16 @@ jobs:
|
|||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
env:
|
env:
|
||||||
version: ${{ needs.prepare.outputs.version }}
|
VERSION: ${{ needs.prepare.outputs.version }}
|
||||||
suffix: ${{ needs.prepare.outputs.pypi_suffix }}
|
SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
|
||||||
channel: ${{ needs.prepare.outputs.channel }}
|
CHANNEL: ${{ needs.prepare.outputs.channel }}
|
||||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||||
pypi_project: ${{ needs.prepare.outputs.pypi_project }}
|
PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
|
||||||
run: |
|
run: |
|
||||||
python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
|
python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
|
||||||
python devscripts/update_changelog.py -vv
|
python devscripts/update_changelog.py -vv
|
||||||
python devscripts/make_lazy_extractors.py
|
python devscripts/make_lazy_extractors.py
|
||||||
sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
|
sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml
|
||||||
|
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
@@ -298,7 +227,11 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||||
|
TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
|
||||||
|
VERSION: ${{ needs.prepare.outputs.version }}
|
||||||
|
HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
@@ -308,87 +241,85 @@ jobs:
|
|||||||
path: artifact
|
path: artifact
|
||||||
pattern: build-*
|
pattern: build-*
|
||||||
merge-multiple: true
|
merge-multiple: true
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: "3.10"
|
||||||
|
|
||||||
- name: Generate release notes
|
- name: Generate release notes
|
||||||
env:
|
env:
|
||||||
head_sha: ${{ needs.prepare.outputs.head_sha }}
|
REPOSITORY: ${{ github.repository }}
|
||||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
BASE_REPO: yt-dlp/yt-dlp
|
||||||
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
|
||||||
|
MASTER_REPO: yt-dlp/yt-dlp-master-builds
|
||||||
|
DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
|
||||||
run: |
|
run: |
|
||||||
printf '%s' \
|
printf '%s' \
|
||||||
'[]' \
|
"[]" \
|
||||||
'(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
|
"(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
|
||||||
'[]' \
|
"[]" \
|
||||||
'(https://discord.gg/H5MNcFW63r "Discord") ' \
|
"(https://discord.gg/H5MNcFW63r \"Discord\") " \
|
||||||
'[]' \
|
"[]" \
|
||||||
'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
|
"(https://github.com/${BASE_REPO}/blob/master/Collaborators.md#collaborators \"Donate\") " \
|
||||||
'[]' \
|
"[]" \
|
||||||
'(https://github.com/${{ github.repository }}' \
|
"(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
|
||||||
'${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
|
if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
|
||||||
${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
|
printf '%s' \
|
||||||
"[]" \
|
"[]" \
|
||||||
"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
|
"(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
|
||||||
"[]" \
|
"[]" \
|
||||||
"(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
|
"(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
|
||||||
printf '\n\n' >> ./RELEASE_NOTES
|
fi
|
||||||
cat >> ./RELEASE_NOTES << EOF
|
printf '\n\n%s\n\n%s%s\n\n---\n' \
|
||||||
#### A description of the various files is in the [README](https://github.com/${{ github.repository }}#release-files)
|
"#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
|
||||||
---
|
"The PyInstaller-bundled executables are subject to the licenses described in " \
|
||||||
$(python ./devscripts/make_changelog.py -vv --collapsible)
|
"[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
|
||||||
EOF
|
python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
|
||||||
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
|
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
|
||||||
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
|
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
|
||||||
printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
|
printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
|
||||||
cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
|
cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
|
||||||
|
|
||||||
- name: Publish to archive repo
|
- name: Publish to archive repo
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
|
GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
|
||||||
GH_REPO: ${{ needs.prepare.outputs.target_repo }}
|
GH_REPO: ${{ needs.prepare.outputs.target_repo }}
|
||||||
version: ${{ needs.prepare.outputs.version }}
|
TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
|
||||||
channel: ${{ needs.prepare.outputs.channel }}
|
TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
|
||||||
if: |
|
if: |
|
||||||
inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
|
inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
|
||||||
run: |
|
run: |
|
||||||
title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
|
|
||||||
gh release create \
|
gh release create \
|
||||||
--notes-file ARCHIVE_NOTES \
|
--notes-file ARCHIVE_NOTES \
|
||||||
--title "${title} ${{ env.version }}" \
|
--title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
|
||||||
${{ env.version }} \
|
"${VERSION}" \
|
||||||
artifact/*
|
artifact/*
|
||||||
|
|
||||||
- name: Prune old release
|
- name: Prune old release
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ github.token }}
|
GH_TOKEN: ${{ github.token }}
|
||||||
version: ${{ needs.prepare.outputs.version }}
|
|
||||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
|
||||||
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
|
||||||
if: |
|
if: |
|
||||||
env.target_repo == github.repository && env.target_tag != env.version
|
env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
|
||||||
run: |
|
run: |
|
||||||
gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
|
gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
|
||||||
git tag --delete "${{ env.target_tag }}" || true
|
git tag --delete "${TARGET_TAG}" || true
|
||||||
sleep 5 # Enough time to cover deletion race condition
|
sleep 5 # Enough time to cover deletion race condition
|
||||||
|
|
||||||
- name: Publish release
|
- name: Publish release
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ github.token }}
|
GH_TOKEN: ${{ github.token }}
|
||||||
version: ${{ needs.prepare.outputs.version }}
|
NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
|
||||||
target_repo: ${{ needs.prepare.outputs.target_repo }}
|
TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
|
||||||
target_tag: ${{ needs.prepare.outputs.target_tag }}
|
TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
|
||||||
head_sha: ${{ needs.prepare.outputs.head_sha }}
|
PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
|
||||||
if: |
|
if: |
|
||||||
env.target_repo == github.repository
|
env.TARGET_REPO == github.repository
|
||||||
run: |
|
run: |
|
||||||
title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
|
gh_options=(
|
||||||
title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
|
--notes-file "${NOTES_FILE}"
|
||||||
gh release create \
|
--target "${HEAD_SHA}"
|
||||||
--notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
|
--title "${TITLE_PREFIX}${TITLE}${VERSION}"
|
||||||
--target ${{ env.head_sha }} \
|
)
|
||||||
--title "${title}${{ env.version }}" \
|
if ((PRERELEASE)); then
|
||||||
${{ inputs.prerelease && '--prerelease' || '' }} \
|
gh_options+=(--prerelease)
|
||||||
${{ env.target_tag }} \
|
fi
|
||||||
artifact/*
|
gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*
|
||||||
|
|||||||
41
.github/workflows/signature-tests.yml
vendored
Normal file
41
.github/workflows/signature-tests.yml
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
name: Signature Tests
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- .github/workflows/signature-tests.yml
|
||||||
|
- test/test_youtube_signature.py
|
||||||
|
- yt_dlp/jsinterp.py
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- .github/workflows/signature-tests.yml
|
||||||
|
- test/test_youtube_signature.py
|
||||||
|
- yt_dlp/jsinterp.py
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: signature-tests-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
tests:
|
||||||
|
name: Signature Tests
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [ubuntu-latest, windows-latest]
|
||||||
|
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
- name: Install test requirements
|
||||||
|
run: python3 ./devscripts/install_deps.py --only-optional --include test
|
||||||
|
- name: Run tests
|
||||||
|
timeout-minutes: 15
|
||||||
|
run: |
|
||||||
|
python3 -m yt_dlp -v || true # Print debug head
|
||||||
|
python3 ./devscripts/run_tests.py test/test_youtube_signature.py
|
||||||
52
.github/workflows/test-workflows.yml
vendored
Normal file
52
.github/workflows/test-workflows.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
name: Test and lint workflows
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- .github/workflows/*
|
||||||
|
- bundle/docker/linux/*.sh
|
||||||
|
- devscripts/setup_variables.py
|
||||||
|
- devscripts/setup_variables_tests.py
|
||||||
|
- devscripts/utils.py
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- .github/workflows/*
|
||||||
|
- bundle/docker/linux/*.sh
|
||||||
|
- devscripts/setup_variables.py
|
||||||
|
- devscripts/setup_variables_tests.py
|
||||||
|
- devscripts/utils.py
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
env:
|
||||||
|
ACTIONLINT_VERSION: "1.7.7"
|
||||||
|
ACTIONLINT_SHA256SUM: 023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757
|
||||||
|
ACTIONLINT_REPO: https://github.com/rhysd/actionlint
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check:
|
||||||
|
name: Check workflows
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: "3.10" # Keep this in sync with release.yml's prepare job
|
||||||
|
- name: Install requirements
|
||||||
|
env:
|
||||||
|
ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
|
||||||
|
run: |
|
||||||
|
python -m devscripts.install_deps -o --include test
|
||||||
|
sudo apt -y install shellcheck
|
||||||
|
python -m pip install -U pyflakes
|
||||||
|
curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
|
||||||
|
printf '%s %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
|
||||||
|
tar xvzf "${ACTIONLINT_TARBALL}" actionlint
|
||||||
|
chmod +x actionlint
|
||||||
|
- name: Run actionlint
|
||||||
|
run: |
|
||||||
|
./actionlint -color
|
||||||
|
- name: Check Docker shell scripts
|
||||||
|
run: |
|
||||||
|
shellcheck bundle/docker/linux/*.sh
|
||||||
|
- name: Test GHA devscripts
|
||||||
|
run: |
|
||||||
|
pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -105,6 +105,8 @@ README.txt
|
|||||||
*.zsh
|
*.zsh
|
||||||
*.spec
|
*.spec
|
||||||
test/testdata/sigs/player-*.js
|
test/testdata/sigs/player-*.js
|
||||||
|
test/testdata/thumbnails/empty.webp
|
||||||
|
test/testdata/thumbnails/foo\ %d\ bar/foo_%d.*
|
||||||
|
|
||||||
# Binary
|
# Binary
|
||||||
/youtube-dl
|
/youtube-dl
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ # CONTRIBUTING TO YT-DLP
|
|||||||
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
|
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
|
||||||
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
|
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
|
||||||
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
|
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
|
||||||
|
- [AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY](#automated-contributions-ai--llm-policy)
|
||||||
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
|
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
|
||||||
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
|
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
|
||||||
- [Adding support for a new site](#adding-support-for-a-new-site)
|
- [Adding support for a new site](#adding-support-for-a-new-site)
|
||||||
@@ -126,7 +127,7 @@ ### Are you willing to share account details if needed?
|
|||||||
While these steps won't necessarily ensure that no misuse of the account takes place, these are still some good practices to follow.
|
While these steps won't necessarily ensure that no misuse of the account takes place, these are still some good practices to follow.
|
||||||
|
|
||||||
- Look for people with `Member` (maintainers of the project) or `Contributor` (people who have previously contributed code) tag on their messages.
|
- Look for people with `Member` (maintainers of the project) or `Contributor` (people who have previously contributed code) tag on their messages.
|
||||||
- Change the password before sharing the account to something random (use [this](https://passwordsgenerator.net/) if you don't have a random password generator).
|
- Change the password before sharing the account to something random.
|
||||||
- Change the password after receiving the account back.
|
- Change the password after receiving the account back.
|
||||||
|
|
||||||
### Is the website primarily used for piracy?
|
### Is the website primarily used for piracy?
|
||||||
@@ -134,6 +135,17 @@ ### Is the website primarily used for piracy?
|
|||||||
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
||||||
|
|
||||||
|
|
||||||
|
# AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY
|
||||||
|
|
||||||
|
Please refrain from submitting issues or pull requests that have been generated by an LLM or other fully-automated tools. Any submission that is in violation of this policy will be closed, and the submitter may be blocked from this repository without warning.
|
||||||
|
|
||||||
|
If you submit an issue, you need to understand what your issue description is saying. You need to be able to answer questions about your bug report or feature request. Using an AI tool to *proofread* your issue/comment text is acceptable. Using an AI tool to *write* your issue/comment text is unacceptable.
|
||||||
|
|
||||||
|
If you submit a pull request, you need to understand what every line of code you've changed does. If you can't explain why your PR is doing something, then do not submit it. Using an AI tool to generate entire lines of code is unacceptable.
|
||||||
|
|
||||||
|
The rationale behind this policy is that automated contributions are a waste of the maintainers' time. Humans spend their time and brainpower reviewing every submission. Issues or pull requests generated by automation tools create an imbalance of effort between the submitter and the reviewer. Nobody learns anything when a maintainer reviews code written by an LLM.
|
||||||
|
|
||||||
|
Additionally, AI-generated code conflicts with this project's license (Unlicense), since you cannot truly release code into the public domain if you didn't author it yourself.
|
||||||
|
|
||||||
|
|
||||||
# DEVELOPER INSTRUCTIONS
|
# DEVELOPER INSTRUCTIONS
|
||||||
@@ -272,7 +284,7 @@ ## Adding support for a new site
|
|||||||
|
|
||||||
You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
|
You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
|
||||||
|
|
||||||
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.9 and PyPy >=3.10. Backward compatibility is not required for even older versions of Python.
|
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.9 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
|
||||||
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
@@ -768,12 +780,10 @@ #### Examples
|
|||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
# My pull request is labeled pending-fixes
|
## My pull request is labeled pending-fixes
|
||||||
|
|
||||||
The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.
|
The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# EMBEDDING YT-DLP
|
# EMBEDDING YT-DLP
|
||||||
See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program
|
See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program
|
||||||
|
|||||||
43
CONTRIBUTORS
43
CONTRIBUTORS
@@ -4,6 +4,7 @@ coletdjnz/colethedj (collaborator)
|
|||||||
Ashish0804 (collaborator)
|
Ashish0804 (collaborator)
|
||||||
bashonly (collaborator)
|
bashonly (collaborator)
|
||||||
Grub4K (collaborator)
|
Grub4K (collaborator)
|
||||||
|
seproDev (collaborator)
|
||||||
h-h-h-h
|
h-h-h-h
|
||||||
pauldubois98
|
pauldubois98
|
||||||
nixxo
|
nixxo
|
||||||
@@ -403,7 +404,6 @@ rebane2001
|
|||||||
road-master
|
road-master
|
||||||
rohieb
|
rohieb
|
||||||
sdht0
|
sdht0
|
||||||
seproDev
|
|
||||||
Hill-98
|
Hill-98
|
||||||
LXYan2333
|
LXYan2333
|
||||||
mushbite
|
mushbite
|
||||||
@@ -770,3 +770,44 @@ NeonMan
|
|||||||
pj47x
|
pj47x
|
||||||
troex
|
troex
|
||||||
WouterGordts
|
WouterGordts
|
||||||
|
baierjan
|
||||||
|
GeoffreyFrogeye
|
||||||
|
Pawka
|
||||||
|
v3DJG6GL
|
||||||
|
yozel
|
||||||
|
brian6932
|
||||||
|
iednod55
|
||||||
|
maxbin123
|
||||||
|
nullpos
|
||||||
|
anlar
|
||||||
|
eason1478
|
||||||
|
ceandreasen
|
||||||
|
chauhantirth
|
||||||
|
helpimnotdrowning
|
||||||
|
adamralph
|
||||||
|
averageFOSSenjoyer
|
||||||
|
bubo
|
||||||
|
flanter21
|
||||||
|
Georift
|
||||||
|
moonshinerd
|
||||||
|
R0hanW
|
||||||
|
ShockedPlot7560
|
||||||
|
swayll
|
||||||
|
atsushi2965
|
||||||
|
barryvan
|
||||||
|
injust
|
||||||
|
iribeirocampos
|
||||||
|
rolandcrosby
|
||||||
|
Sojiroh
|
||||||
|
tchebb
|
||||||
|
AzartX47
|
||||||
|
e2dk4r
|
||||||
|
junyilou
|
||||||
|
PierreMesure
|
||||||
|
Randalix
|
||||||
|
runarmod
|
||||||
|
gitchasing
|
||||||
|
zakaryan2004
|
||||||
|
cdce8p
|
||||||
|
nicolaasjan
|
||||||
|
willsmillie
|
||||||
|
|||||||
462
Changelog.md
462
Changelog.md
@@ -4,6 +4,468 @@ # Changelog
|
|||||||
# To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master
|
# To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master
|
||||||
-->
|
-->
|
||||||
|
|
||||||
|
### 2025.09.23
|
||||||
|
|
||||||
|
#### Important changes
|
||||||
|
- **Several options have been deprecated**
|
||||||
|
In order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- **compat**: [Add `compat_datetime_from_timestamp`](https://github.com/yt-dlp/yt-dlp/commit/6a763a55d8a93b2a964ecf7699248ad342485412) ([#11902](https://github.com/yt-dlp/yt-dlp/issues/11902)) by [pzhlkj6612](https://github.com/pzhlkj6612), [seproDev](https://github.com/seproDev)
|
||||||
|
- **utils**
|
||||||
|
- `mimetype2ext`: [Recognize `vnd.dlna.mpeg-tts`](https://github.com/yt-dlp/yt-dlp/commit/98b6b0d339130e955f9d45ce67c0357c633c1627) ([#14388](https://github.com/yt-dlp/yt-dlp/issues/14388)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- `random_user_agent`: [Bump versions](https://github.com/yt-dlp/yt-dlp/commit/f3829463c728a5b5e62b3fc157e71c99b26edac7) ([#14317](https://github.com/yt-dlp/yt-dlp/issues/14317)) by [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- **10play**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/067062bb87ac057e453ce9efdac7ca117a6a7da0) ([#14242](https://github.com/yt-dlp/yt-dlp/issues/14242)) by [Sipherdrakon](https://github.com/Sipherdrakon)
|
||||||
|
- **applepodcast**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b2c01d0498653e0239c7226c5a7fcb614dd4dbc8) ([#14372](https://github.com/yt-dlp/yt-dlp/issues/14372)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **loco**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f5cb721185e8725cf4eb4080e86aa9aa73ef25b3) ([#14256](https://github.com/yt-dlp/yt-dlp/issues/14256)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **mitele**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/820c6e244571557fcfc127d4b3680e2d07c04dca) ([#14348](https://github.com/yt-dlp/yt-dlp/issues/14348)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **newspicks**: [Warn when only preview is available](https://github.com/yt-dlp/yt-dlp/commit/9def9a4b0e958285e055eb350e5dd43b5c423336) ([#14197](https://github.com/yt-dlp/yt-dlp/issues/14197)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **onsen**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/17bfaa53edf5c52fce73cf0cef4592f929c2462d) ([#10971](https://github.com/yt-dlp/yt-dlp/issues/10971)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **pixivsketch**: [Remove extractors](https://github.com/yt-dlp/yt-dlp/commit/3d9a88bd8ef149d781c7e569e48e61551eda395e) ([#14196](https://github.com/yt-dlp/yt-dlp/issues/14196)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **smotrim**: [Rework extractors](https://github.com/yt-dlp/yt-dlp/commit/8cb037c0b06c2815080f87d61ea2e95c412785fc) ([#14200](https://github.com/yt-dlp/yt-dlp/issues/14200)) by [doe1080](https://github.com/doe1080), [swayll](https://github.com/swayll)
|
||||||
|
- **telecinco**: [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/e123a48f1155703d8709a4221a42bd45c0a2b3ce) ([#14351](https://github.com/yt-dlp/yt-dlp/issues/14351)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **tiktok**: live: [Fix room ID extraction](https://github.com/yt-dlp/yt-dlp/commit/5c1abcdc49b9d23e1dcb77b95d063cf2bf93e352) ([#14287](https://github.com/yt-dlp/yt-dlp/issues/14287)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **ttinglive**: [Adapt FlexTV extractor to new domain](https://github.com/yt-dlp/yt-dlp/commit/4bc19adc8798e7564513898cf34adc432c6c5709) ([#14375](https://github.com/yt-dlp/yt-dlp/issues/14375)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **tunein**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/7d9e48b22a780c2e8d2d2d68940d49fd2029ab70) ([#13981](https://github.com/yt-dlp/yt-dlp/issues/13981)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **twitch**: clips: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f8750504c2f71b54586fb857d60dce4e354a13ea) ([#14397](https://github.com/yt-dlp/yt-dlp/issues/14397)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **vimeo**: [Fix login error handling](https://github.com/yt-dlp/yt-dlp/commit/679587dac7cd011a1472255e1f06efb017ba91b6) ([#14280](https://github.com/yt-dlp/yt-dlp/issues/14280)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **vk**
|
||||||
|
- [Support vksport URLs](https://github.com/yt-dlp/yt-dlp/commit/b81e9272dce5844e8fba371cb4b4fd95ad3ed819) ([#14341](https://github.com/yt-dlp/yt-dlp/issues/14341)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- uservideos: [Support alternate URL format](https://github.com/yt-dlp/yt-dlp/commit/bf5d18016b03a3f2fd5d3494d9efe85d3f8beeac) ([#14376](https://github.com/yt-dlp/yt-dlp/issues/14376)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a1c98226a4e869a34cc764a9dcf7a4558516308e) ([#14286](https://github.com/yt-dlp/yt-dlp/issues/14286)) by [nicolaasjan](https://github.com/nicolaasjan), [willsmillie](https://github.com/willsmillie) (With fixes in [677997d](https://github.com/yt-dlp/yt-dlp/commit/677997d84eaec0037397f7d935386daa3025b004) by [arand](https://github.com/arand), [thegymguy](https://github.com/thegymguy))
|
||||||
|
- **youtube**: [Force player `0004de42`](https://github.com/yt-dlp/yt-dlp/commit/7f5d9f8543d19590eeec9473d54fa00151afa78a) ([#14398](https://github.com/yt-dlp/yt-dlp/issues/14398)) by [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **build**
|
||||||
|
- [Fix cache warmer](https://github.com/yt-dlp/yt-dlp/commit/8597a4331e8535a246d777bb8397bdcab251766c) ([#14261](https://github.com/yt-dlp/yt-dlp/issues/14261)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Post-release workflow cleanup](https://github.com/yt-dlp/yt-dlp/commit/cd94e7004036e0149d7d3fa236c7dd44cf460788) ([#14250](https://github.com/yt-dlp/yt-dlp/issues/14250)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Refactor Linux build jobs](https://github.com/yt-dlp/yt-dlp/commit/e2d37bcc8e84be9ce0f67fc24cb830c13963d10f) ([#14275](https://github.com/yt-dlp/yt-dlp/issues/14275)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Use PyInstaller 6.16 for Windows](https://github.com/yt-dlp/yt-dlp/commit/df4b4e8ccf3385be6d2ad65465a0704c223dfdfb) ([#14318](https://github.com/yt-dlp/yt-dlp/issues/14318)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Use SPDX license identifier](https://github.com/yt-dlp/yt-dlp/commit/48a214bef4bfd5984362d3d24b09dce50ba449ea) ([#14260](https://github.com/yt-dlp/yt-dlp/issues/14260)) by [cdce8p](https://github.com/cdce8p)
|
||||||
|
- [Use new PyInstaller builds for Windows](https://github.com/yt-dlp/yt-dlp/commit/c8ede5f34d6c95c442b936bb01ecbcb724aefdef) ([#14273](https://github.com/yt-dlp/yt-dlp/issues/14273)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **ci**
|
||||||
|
- [Bump actions/setup-python to v6](https://github.com/yt-dlp/yt-dlp/commit/22ea0688ed6bcdbe4c51401a84239cda3decfc9c) ([#14282](https://github.com/yt-dlp/yt-dlp/issues/14282)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Improve workflow checks](https://github.com/yt-dlp/yt-dlp/commit/ae3923b6b23bc62115be55510d6b5842f7a46b5f) ([#14316](https://github.com/yt-dlp/yt-dlp/issues/14316)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Test and lint workflows](https://github.com/yt-dlp/yt-dlp/commit/7c9b10ebc83907d37f9f65ea9d4bd6f5e3bd1371) ([#14249](https://github.com/yt-dlp/yt-dlp/issues/14249)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Test with Python 3.14](https://github.com/yt-dlp/yt-dlp/commit/83b8409366d0f9554eaeae56394b244dab64a2cb) ([#13468](https://github.com/yt-dlp/yt-dlp/issues/13468)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cleanup**
|
||||||
|
- [Bump ruff to 0.13.x](https://github.com/yt-dlp/yt-dlp/commit/ba8044685537e8e14adc6826fb4d730856fd2e2b) ([#14293](https://github.com/yt-dlp/yt-dlp/issues/14293)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Deprecate various options](https://github.com/yt-dlp/yt-dlp/commit/08d78996831bd8e1e3c2592d740c3def00bbf548) ([#13821](https://github.com/yt-dlp/yt-dlp/issues/13821)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- [Remove broken extractors](https://github.com/yt-dlp/yt-dlp/commit/65e90aea29cf3bfc9d1ae3e009fbf9a8db3a23c9) ([#14305](https://github.com/yt-dlp/yt-dlp/issues/14305)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Remove setup.cfg](https://github.com/yt-dlp/yt-dlp/commit/eb4b3a5fc7765a6cd0370ca44ccee0d7d5111dd7) ([#14314](https://github.com/yt-dlp/yt-dlp/issues/14314)) by [seproDev](https://github.com/seproDev) (With fixes in [8ab262c](https://github.com/yt-dlp/yt-dlp/commit/8ab262c66bd3e1d8874fb2d070068ba1f0d48f16) by [bashonly](https://github.com/bashonly))
|
||||||
|
- Miscellaneous: [2e81e29](https://github.com/yt-dlp/yt-dlp/commit/2e81e298cdce23afadb06a95836284acb38f7018) by [bashonly](https://github.com/bashonly), [doe1080](https://github.com/doe1080), [seproDev](https://github.com/seproDev)
|
||||||
|
- **docs**
|
||||||
|
- [Clarify license of PyInstaller-bundled executables](https://github.com/yt-dlp/yt-dlp/commit/e6e6b512141e66b1b36058966804fe59c02a2b4d) ([#14257](https://github.com/yt-dlp/yt-dlp/issues/14257)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- [Establish AI/LLM contribution policy](https://github.com/yt-dlp/yt-dlp/commit/8821682f15af59047bc1f92724ef8a9ba30d6f7e) ([#14194](https://github.com/yt-dlp/yt-dlp/issues/14194)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
|
||||||
|
- **test**: utils: [Fix `sanitize_path` test for Windows CPython 3.11](https://github.com/yt-dlp/yt-dlp/commit/a183837ec8bb5e28fe6eb3a9d77ea2d0d7a106bd) ([#13878](https://github.com/yt-dlp/yt-dlp/issues/13878)) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
|
||||||
|
### 2025.09.05
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- [Fix `--id` deprecation warning](https://github.com/yt-dlp/yt-dlp/commit/76bb46002c9a9655f2b1d29d4840e75e79037cfa) ([#14190](https://github.com/yt-dlp/yt-dlp/issues/14190)) by [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- **charlierose**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/603acdff07f0226088916886002d2ad8309ff9d3) ([#14231](https://github.com/yt-dlp/yt-dlp/issues/14231)) by [gitchasing](https://github.com/gitchasing)
|
||||||
|
- **googledrive**: [Fix subtitles extraction](https://github.com/yt-dlp/yt-dlp/commit/18fe696df9d60804a8f5cb8cd74f38111d6eb711) ([#14139](https://github.com/yt-dlp/yt-dlp/issues/14139)) by [zakaryan2004](https://github.com/zakaryan2004)
|
||||||
|
- **itvbtcc**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/0b51005b4819e7cea222fcbaf8e60391db4f732c) ([#14161](https://github.com/yt-dlp/yt-dlp/issues/14161)) by [garret1317](https://github.com/garret1317)
|
||||||
|
- **kick**: vod: [Support ongoing livestream VODs](https://github.com/yt-dlp/yt-dlp/commit/1e28f6bf743627b909135bb9a88537ad2deccaf0) ([#14154](https://github.com/yt-dlp/yt-dlp/issues/14154)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
|
||||||
|
- **lrt**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/ed24640943872c4cf30d7cc4601bec87b50ba03c) ([#14193](https://github.com/yt-dlp/yt-dlp/issues/14193)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **tver**: [Extract more metadata](https://github.com/yt-dlp/yt-dlp/commit/223baa81f6637dcdef108f817180d8d1ae9fa213) ([#14165](https://github.com/yt-dlp/yt-dlp/issues/14165)) by [arabcoders](https://github.com/arabcoders)
|
||||||
|
- **vevo**: [Restore extractors](https://github.com/yt-dlp/yt-dlp/commit/d925e92b710153d0d51d030f115b3c87226bc0f0) ([#14203](https://github.com/yt-dlp/yt-dlp/issues/14203)) by [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **build**: [Overhaul Linux builds and refactor release workflow](https://github.com/yt-dlp/yt-dlp/commit/50136eeeb3767289b236f140b759f23b39b00888) ([#13997](https://github.com/yt-dlp/yt-dlp/issues/13997)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
### 2025.08.27
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- **generic**
|
||||||
|
- [Simplify invalid URL error message](https://github.com/yt-dlp/yt-dlp/commit/1ddbd033f0fd65917526b1271cea66913ac8647f) ([#14167](https://github.com/yt-dlp/yt-dlp/issues/14167)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- [Use https as fallback protocol](https://github.com/yt-dlp/yt-dlp/commit/fec30c56f0e97e573ace659104ff0d72c4cc9809) ([#14160](https://github.com/yt-dlp/yt-dlp/issues/14160)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **skeb**: [Support wav files](https://github.com/yt-dlp/yt-dlp/commit/d6950c27af31908363c5c815e3b7eb4f9ff41643) ([#14147](https://github.com/yt-dlp/yt-dlp/issues/14147)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **youtube**
|
||||||
|
- [Add `tcc` player JS variant](https://github.com/yt-dlp/yt-dlp/commit/8f4a908300f55054bc96814bceeaa1034fdf4110) ([#14134](https://github.com/yt-dlp/yt-dlp/issues/14134)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Deprioritize `web_safari` m3u8 formats](https://github.com/yt-dlp/yt-dlp/commit/5c7ad68ff1643ad80d18cef8be9db8fcab05ee6c) ([#14168](https://github.com/yt-dlp/yt-dlp/issues/14168)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Player client maintenance](https://github.com/yt-dlp/yt-dlp/commit/3bd91544122142a87863d79e54e995c26cfd7f92) ([#14135](https://github.com/yt-dlp/yt-dlp/issues/14135)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Use alternative `tv` user-agent when authenticated](https://github.com/yt-dlp/yt-dlp/commit/8cd37b85d492edb56a4f7506ea05527b85a6b02b) ([#14169](https://github.com/yt-dlp/yt-dlp/issues/14169)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
### 2025.08.22
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- **cookies**: [Fix `--cookies-from-browser` with Firefox 142+](https://github.com/yt-dlp/yt-dlp/commit/f29acc4a6e73a9dc091686d40951288acae5a46d) ([#14114](https://github.com/yt-dlp/yt-dlp/issues/14114)) by [bashonly](https://github.com/bashonly), [Grub4K](https://github.com/Grub4K) (With fixes in [526410b](https://github.com/yt-dlp/yt-dlp/commit/526410b4af9c1ca73aa3503cdaf4d32e42308fd6) by [bashonly](https://github.com/bashonly))
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- **mediaklikk**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/4dbe96459d7e632d397826d0bb323f3f0ac8b057) ([#13975](https://github.com/yt-dlp/yt-dlp/issues/13975)) by [zhallgato](https://github.com/zhallgato)
|
||||||
|
- **steam**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/fcea3edb5c5648638357f27431500c0aaf08b147) ([#14093](https://github.com/yt-dlp/yt-dlp/issues/14093)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **youtube**
|
||||||
|
- [Improve `tv` client context](https://github.com/yt-dlp/yt-dlp/commit/39b7b8ddc7a4d0669e0cf39105c3bb84cb2736cc) ([#14122](https://github.com/yt-dlp/yt-dlp/issues/14122)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Optimize playback wait times](https://github.com/yt-dlp/yt-dlp/commit/5c8bcfdbc638dfde13e93157637d8521413ed774) ([#14124](https://github.com/yt-dlp/yt-dlp/issues/14124)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Replace `ios` with `tv_simply` in default clients](https://github.com/yt-dlp/yt-dlp/commit/895e762a834bbd729ab822c7d17329fdf815aaf2) ([#14123](https://github.com/yt-dlp/yt-dlp/issues/14123)) by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Update `tv` client config](https://github.com/yt-dlp/yt-dlp/commit/a03c37b44ec8f50fd472c409115096f92410346d) ([#14101](https://github.com/yt-dlp/yt-dlp/issues/14101)) by [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **build**: [Post-release workflow cleanup](https://github.com/yt-dlp/yt-dlp/commit/415b6d9ca868032a45b30b9139a50c5c06be2feb) ([#14090](https://github.com/yt-dlp/yt-dlp/issues/14090)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
### 2025.08.20
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- [Warn against using `-f mp4`](https://github.com/yt-dlp/yt-dlp/commit/70f56699515e0854a4853d214dce11b61d432387) ([#13915](https://github.com/yt-dlp/yt-dlp/issues/13915)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **utils**: [Add improved `jwt_encode` function](https://github.com/yt-dlp/yt-dlp/commit/35da8df4f843cb8f0656a301e5bebbf47d64d69a) ([#14071](https://github.com/yt-dlp/yt-dlp/issues/14071)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- [Extract avif storyboard formats from MPD manifests](https://github.com/yt-dlp/yt-dlp/commit/770119bdd15c525ba4338503f0eb68ea4baedf10) ([#14016](https://github.com/yt-dlp/yt-dlp/issues/14016)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- `_rta_search`: [Do not assume `age_limit` is `0`](https://github.com/yt-dlp/yt-dlp/commit/6ae3543d5a1feea0c546571fd2782b024c108eac) ([#13985](https://github.com/yt-dlp/yt-dlp/issues/13985)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **adobetv**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/c22660aed5fadb4ac29bdf25db4e8016414153cc) ([#13917](https://github.com/yt-dlp/yt-dlp/issues/13917)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **bilibili**: [Handle Bangumi redirection](https://github.com/yt-dlp/yt-dlp/commit/6ca9165648ac9a07c012de639faf50a97cbe0991) ([#14038](https://github.com/yt-dlp/yt-dlp/issues/14038)) by [grqz](https://github.com/grqz), [junyilou](https://github.com/junyilou)
|
||||||
|
- **faulio**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/74b4b3b00516e92a60250e0626272a6826459057) ([#13907](https://github.com/yt-dlp/yt-dlp/issues/13907)) by [CasperMcFadden95](https://github.com/CasperMcFadden95)
|
||||||
|
- **francetv**: site: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/7b8a8abb98165a53c026e2a3f52faee608df1f20) ([#14082](https://github.com/yt-dlp/yt-dlp/issues/14082)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **medialaan**: [Rework extractors](https://github.com/yt-dlp/yt-dlp/commit/86d74e5cf0e06c53c931ccdbdd497e3f2c4d2fe2) ([#14015](https://github.com/yt-dlp/yt-dlp/issues/14015)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **mtv**: [Overhaul extractors](https://github.com/yt-dlp/yt-dlp/commit/8df121ba59208979aa713822781891347abd03d1) ([#14052](https://github.com/yt-dlp/yt-dlp/issues/14052)) by [bashonly](https://github.com/bashonly), [doe1080](https://github.com/doe1080), [Randalix](https://github.com/Randalix), [seproDev](https://github.com/seproDev)
|
||||||
|
- **niconico**: live: [Support age-restricted streams](https://github.com/yt-dlp/yt-dlp/commit/374ea049f531959bcccf8a1e6bc5659d228a780e) ([#13549](https://github.com/yt-dlp/yt-dlp/issues/13549)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **nrktvepisode**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/7540aa1da1800769af40381f423825a1a8826377) ([#14065](https://github.com/yt-dlp/yt-dlp/issues/14065)) by [runarmod](https://github.com/runarmod)
|
||||||
|
- **puhutv**: [Fix playlists extraction](https://github.com/yt-dlp/yt-dlp/commit/36e873822bdb2c5aba3780dd3ae32cbae564c6cd) ([#11955](https://github.com/yt-dlp/yt-dlp/issues/11955)) by [e2dk4r](https://github.com/e2dk4r)
|
||||||
|
- **steam**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/d3d1ac8eb2f9e96f3d75292e0effe2b1bccece3b) ([#14008](https://github.com/yt-dlp/yt-dlp/issues/14008)) by [AzartX47](https://github.com/AzartX47)
|
||||||
|
- **svt**: [Extract forced subs under separate lang code](https://github.com/yt-dlp/yt-dlp/commit/82a139020417a501f261d9fe02cefca01b1e12e4) ([#14062](https://github.com/yt-dlp/yt-dlp/issues/14062)) by [PierreMesure](https://github.com/PierreMesure)
|
||||||
|
- **tiktok**: user: [Avoid infinite loop during extraction](https://github.com/yt-dlp/yt-dlp/commit/edf55e81842fcfa6c302528d7f33ccd5081b37ef) ([#14032](https://github.com/yt-dlp/yt-dlp/issues/14032)) by [bashonly](https://github.com/bashonly) (With fixes in [471a2b6](https://github.com/yt-dlp/yt-dlp/commit/471a2b60e0a3e056960d9ceb1ebf57908428f752))
|
||||||
|
- **vimeo**
|
||||||
|
- album: [Support embed-only and non-numeric albums](https://github.com/yt-dlp/yt-dlp/commit/d8200ff0a4699e06c9f7daca8f8531f8b98e68f2) ([#14021](https://github.com/yt-dlp/yt-dlp/issues/14021)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- event: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/0f6b915822fb64bd944126fdacd401975c9f06ed) ([#14064](https://github.com/yt-dlp/yt-dlp/issues/14064)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **weibo**
|
||||||
|
- [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/8e3f8065af1415caeff788c5c430703dd0d8f576) ([#14012](https://github.com/yt-dlp/yt-dlp/issues/14012)) by [AzartX47](https://github.com/AzartX47), [bashonly](https://github.com/bashonly)
|
||||||
|
- [Support more URLs and --no-playlist](https://github.com/yt-dlp/yt-dlp/commit/404bd889d0e0b62ad72b7281e3fefdc0497080b3) ([#14035](https://github.com/yt-dlp/yt-dlp/issues/14035)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **youtube**
|
||||||
|
- [Add `es5` and `es6` player JS variants](https://github.com/yt-dlp/yt-dlp/commit/f2919bd28eac905f1267c62b83738a02bb5b4e04) ([#14005](https://github.com/yt-dlp/yt-dlp/issues/14005)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Add `playback_wait` extractor-arg](https://github.com/yt-dlp/yt-dlp/commit/f63a7e41d120ef84f0f2274b0962438e3272d2fa) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Default to `main` player JS variant](https://github.com/yt-dlp/yt-dlp/commit/df0553153e41f81e3b30aa5bb1d119c61bd449ac) ([#14079](https://github.com/yt-dlp/yt-dlp/issues/14079)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Extract title and description from initial data](https://github.com/yt-dlp/yt-dlp/commit/7bc53ae79930b36f4f947679545c75f36e9f0ddd) ([#14078](https://github.com/yt-dlp/yt-dlp/issues/14078)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Handle required preroll waiting period](https://github.com/yt-dlp/yt-dlp/commit/a97f4cb57e61e19be61a7d5ac19665d4b567c960) ([#14081](https://github.com/yt-dlp/yt-dlp/issues/14081)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Remove default player params](https://github.com/yt-dlp/yt-dlp/commit/d154dc3dcf0c7c75dbabb6cd1aca66fdd806f858) ([#14081](https://github.com/yt-dlp/yt-dlp/issues/14081)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- tab: [Fix playlists tab extraction](https://github.com/yt-dlp/yt-dlp/commit/8a8861d53864c8a38e924bc0657ead5180f17268) ([#14030](https://github.com/yt-dlp/yt-dlp/issues/14030)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Downloader changes
|
||||||
|
- [Support `available_at` format field](https://github.com/yt-dlp/yt-dlp/commit/438d3f06b3c41bdef8112d40b75d342186e91a16) ([#13980](https://github.com/yt-dlp/yt-dlp/issues/13980)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Postprocessor changes
|
||||||
|
- **xattrmetadata**: [Only set "Where From" attribute on macOS](https://github.com/yt-dlp/yt-dlp/commit/bdeb3eb3f29eebbe8237fbc5186e51e7293eea4a) ([#13999](https://github.com/yt-dlp/yt-dlp/issues/13999)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **build**
|
||||||
|
- [Add Windows ARM64 builds](https://github.com/yt-dlp/yt-dlp/commit/07247d6c20fef1ad13b6f71f6355a44d308cf010) ([#14003](https://github.com/yt-dlp/yt-dlp/issues/14003)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Bump PyInstaller version to 6.15.0 for Windows](https://github.com/yt-dlp/yt-dlp/commit/681ed2153de754c2c885fdad09ab71fffa8114f9) ([#14002](https://github.com/yt-dlp/yt-dlp/issues/14002)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Discontinue `darwin_legacy_exe` support](https://github.com/yt-dlp/yt-dlp/commit/aea85d525e1007bb64baec0e170c054292d0858a) ([#13860](https://github.com/yt-dlp/yt-dlp/issues/13860)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cleanup**
|
||||||
|
- [Remove dead extractors](https://github.com/yt-dlp/yt-dlp/commit/6f4c1bb593da92f0ce68229d0c813cdbaf1314da) ([#13996](https://github.com/yt-dlp/yt-dlp/issues/13996)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- Miscellaneous: [c2fc4f3](https://github.com/yt-dlp/yt-dlp/commit/c2fc4f3e7f6d757250183b177130c64beee50520) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
### 2025.08.11
|
||||||
|
|
||||||
|
#### Important changes
|
||||||
|
- **The minimum *recommended* Python version has been raised to 3.10**
|
||||||
|
Since Python 3.9 will reach end-of-life in October 2025, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)
|
||||||
|
- **darwin_legacy_exe builds are being discontinued**
|
||||||
|
This release's `yt-dlp_macos_legacy` binary will likely be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13856)
|
||||||
|
- **linux_armv7l_exe builds are being discontinued**
|
||||||
|
This release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- [Deprecate `darwin_legacy_exe` support](https://github.com/yt-dlp/yt-dlp/commit/cc5a5caac5fbc0d605b52bde0778d6fd5f97b5ab) ([#13857](https://github.com/yt-dlp/yt-dlp/issues/13857)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Deprecate `linux_armv7l_exe` support](https://github.com/yt-dlp/yt-dlp/commit/c76ce28e06c816eb5b261dfb6aff6e69dd9b7382) ([#13978](https://github.com/yt-dlp/yt-dlp/issues/13978)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Raise minimum recommended Python version to 3.10](https://github.com/yt-dlp/yt-dlp/commit/23c658b9cbe34a151f8f921ab1320bb5d4e40a4d) ([#13859](https://github.com/yt-dlp/yt-dlp/issues/13859)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Warn when yt-dlp is severely outdated](https://github.com/yt-dlp/yt-dlp/commit/662af5bb8307ec3ff8ab0857f1159922d64792f0) ([#13937](https://github.com/yt-dlp/yt-dlp/issues/13937)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **cookies**: [Load cookies with float `expires` timestamps](https://github.com/yt-dlp/yt-dlp/commit/28b68f687561468e0c664dcb430707458970019f) ([#13873](https://github.com/yt-dlp/yt-dlp/issues/13873)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **utils**
|
||||||
|
- [Add `WINDOWS_VT_MODE` to globals](https://github.com/yt-dlp/yt-dlp/commit/eed94c7306d4ecdba53ad8783b1463a9af5c97f1) ([#12460](https://github.com/yt-dlp/yt-dlp/issues/12460)) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
- `parse_resolution`: [Support width-only pattern](https://github.com/yt-dlp/yt-dlp/commit/4385480795acda35667be008d0bf26b46e9d65b4) ([#13802](https://github.com/yt-dlp/yt-dlp/issues/13802)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- `random_user_agent`: [Bump versions](https://github.com/yt-dlp/yt-dlp/commit/c59ad2b066bbccd3cc4eed580842f961bce7dd4a) ([#13543](https://github.com/yt-dlp/yt-dlp/issues/13543)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- **archive.org**: [Fix metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/42ca3d601ee10cef89d698f72e2b5d44fab4f013) ([#13880](https://github.com/yt-dlp/yt-dlp/issues/13880)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **digitalconcerthall**: [Fix formats extraction](https://github.com/yt-dlp/yt-dlp/commit/e8d2807296ccc603e031f5982623a8311f2a5119) ([#13948](https://github.com/yt-dlp/yt-dlp/issues/13948)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **eagleplatform**: [Remove extractors](https://github.com/yt-dlp/yt-dlp/commit/1fe83b0111277a6f214c5ec1819cfbf943508baf) ([#13469](https://github.com/yt-dlp/yt-dlp/issues/13469)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **fauliolive**
|
||||||
|
- [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/3e609b2cedd285739bf82c7af7853735092070a4) ([#13421](https://github.com/yt-dlp/yt-dlp/issues/13421)) by [CasperMcFadden95](https://github.com/CasperMcFadden95), [seproDev](https://github.com/seproDev)
|
||||||
|
- [Support Bahry TV](https://github.com/yt-dlp/yt-dlp/commit/daa1859be1b0e7d123da8b4e0988f2eb7bd47d15) ([#13850](https://github.com/yt-dlp/yt-dlp/issues/13850)) by [CasperMcFadden95](https://github.com/CasperMcFadden95)
|
||||||
|
- **fc2**: [Fix old video support](https://github.com/yt-dlp/yt-dlp/commit/cd31c319e3142622ec43c49485d196ed2835df05) ([#12633](https://github.com/yt-dlp/yt-dlp/issues/12633)) by [JChris246](https://github.com/JChris246), [seproDev](https://github.com/seproDev)
|
||||||
|
- **motherless**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/e8d49b1c7f11c7e282319395ca9c2a201304be41) ([#13960](https://github.com/yt-dlp/yt-dlp/issues/13960)) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
- **n1info**: article: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/6539ee1947d7885d3606da6365fd858308435a63) ([#13865](https://github.com/yt-dlp/yt-dlp/issues/13865)) by [u-spec-png](https://github.com/u-spec-png)
|
||||||
|
- **neteasemusic**: [Support XFF](https://github.com/yt-dlp/yt-dlp/commit/e8c2bf798b6707d27fecde66161172da69c7cd72) ([#11044](https://github.com/yt-dlp/yt-dlp/issues/11044)) by [c-basalt](https://github.com/c-basalt)
|
||||||
|
- **niconico**: [Fix error handling & improve metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/05e553e9d1f57655d65c9811d05df38261601b85) ([#13240](https://github.com/yt-dlp/yt-dlp/issues/13240)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **parlview**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/485de69dbfeb7de7bcf9f7fe16d6c6ba9e81e1a0) ([#13788](https://github.com/yt-dlp/yt-dlp/issues/13788)) by [barryvan](https://github.com/barryvan)
|
||||||
|
- **plyrembed**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/61d4cd0bc01be6ebe11fd53c2d3805d1a2058990) ([#13836](https://github.com/yt-dlp/yt-dlp/issues/13836)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- **royalive**: [Support `en` URLs](https://github.com/yt-dlp/yt-dlp/commit/43dedbe6394bdd489193b15ee9690a62d1b82d94) ([#13908](https://github.com/yt-dlp/yt-dlp/issues/13908)) by [CasperMcFadden95](https://github.com/CasperMcFadden95)
|
||||||
|
- **rtve.es**: program: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/b831406a1d3be34c159835079d12bae624c43610) ([#12955](https://github.com/yt-dlp/yt-dlp/issues/12955)) by [meGAmeS1](https://github.com/meGAmeS1), [seproDev](https://github.com/seproDev)
|
||||||
|
- **shiey**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/6ff135c31914ea8b5545f8d187c60e852cfde9bc) ([#13354](https://github.com/yt-dlp/yt-dlp/issues/13354)) by [iribeirocampos](https://github.com/iribeirocampos)
|
||||||
|
- **sportdeutschland**: [Support embedded player URLs](https://github.com/yt-dlp/yt-dlp/commit/30302df22b7b431ce920e0f7298cd10be9989967) ([#13833](https://github.com/yt-dlp/yt-dlp/issues/13833)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
|
||||||
|
- **sproutvideo**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/59765ecbc08d18005de7143fbb1d1caf90239471) ([#13813](https://github.com/yt-dlp/yt-dlp/issues/13813)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **tbs**: [Fix truTV support](https://github.com/yt-dlp/yt-dlp/commit/0adeb1e54b2d7e95cd19999e71013877850f8f41) ([#9683](https://github.com/yt-dlp/yt-dlp/issues/9683)) by [bashonly](https://github.com/bashonly), [ischmidt20](https://github.com/ischmidt20)
|
||||||
|
- **tbsjp**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/71f30921a2023dbb25c53fd1bb1399cac803116d) ([#13485](https://github.com/yt-dlp/yt-dlp/issues/13485)) by [garret1317](https://github.com/garret1317)
|
||||||
|
- **tver**
|
||||||
|
- [Extract Streaks API info](https://github.com/yt-dlp/yt-dlp/commit/70d7687487252a08dbf8b2831743e7833472ba05) ([#13885](https://github.com/yt-dlp/yt-dlp/issues/13885)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Support --ignore-no-formats-error when geo-blocked](https://github.com/yt-dlp/yt-dlp/commit/121647705a2fc6b968278723fe61801007e228a4) ([#13598](https://github.com/yt-dlp/yt-dlp/issues/13598)) by [arabcoders](https://github.com/arabcoders)
|
||||||
|
- **tvw**: news: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/682334e4b35112f7a5798decdcb5cb12230ef948) ([#12907](https://github.com/yt-dlp/yt-dlp/issues/12907)) by [fries1234](https://github.com/fries1234)
|
||||||
|
- **vimeo**: [Fix login support and require authentication](https://github.com/yt-dlp/yt-dlp/commit/afaf60d9fd5a0c7a85aeb1374fd97fbc13cd652c) ([#13823](https://github.com/yt-dlp/yt-dlp/issues/13823)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **yandexdisk**: [Support 360 URLs](https://github.com/yt-dlp/yt-dlp/commit/a6df5e8a58d6743dd230011389c986495ec509da) ([#13935](https://github.com/yt-dlp/yt-dlp/issues/13935)) by [Sojiroh](https://github.com/Sojiroh)
|
||||||
|
- **youtube**
|
||||||
|
- [Add player params to mweb client](https://github.com/yt-dlp/yt-dlp/commit/38c2bf40260f7788efb5a7f5e8eba8e5cb43f741) ([#13914](https://github.com/yt-dlp/yt-dlp/issues/13914)) by [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Update player params](https://github.com/yt-dlp/yt-dlp/commit/bf366517ef0b745490ee9e0f929254fa26b69647) ([#13979](https://github.com/yt-dlp/yt-dlp/issues/13979)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Downloader changes
|
||||||
|
- **dash**: [Re-extract if using --load-info-json with --live-from-start](https://github.com/yt-dlp/yt-dlp/commit/fe53ebe5b66a03c664708a4d6fd87b8c13a1bc7b) ([#13922](https://github.com/yt-dlp/yt-dlp/issues/13922)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **external**: [Work around ffmpeg's `file:` URL handling](https://github.com/yt-dlp/yt-dlp/commit/d399505fdf8292332bdc91d33859a0b0d08104fd) ([#13844](https://github.com/yt-dlp/yt-dlp/issues/13844)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **hls**: [Fix `--hls-split-continuity` support](https://github.com/yt-dlp/yt-dlp/commit/57186f958f164daa50203adcbf7ec74d541151cf) ([#13321](https://github.com/yt-dlp/yt-dlp/issues/13321)) by [tchebb](https://github.com/tchebb)
|
||||||
|
|
||||||
|
#### Postprocessor changes
|
||||||
|
- **embedthumbnail**: [Fix ffmpeg args for embedding in mp3](https://github.com/yt-dlp/yt-dlp/commit/7e3f48d64d237281a97b3df1a61980c78a0302fe) ([#13720](https://github.com/yt-dlp/yt-dlp/issues/13720)) by [atsushi2965](https://github.com/atsushi2965)
|
||||||
|
- **xattrmetadata**: [Add macOS "Where from" attribute](https://github.com/yt-dlp/yt-dlp/commit/3e918d825d7ff367812658957b281b8cda8f9ebb) ([#12664](https://github.com/yt-dlp/yt-dlp/issues/12664)) by [rolandcrosby](https://github.com/rolandcrosby) (With fixes in [1e0c77d](https://github.com/yt-dlp/yt-dlp/commit/1e0c77ddcce335a1875ecc17d93ed6ff3fabd975) by [seproDev](https://github.com/seproDev))
|
||||||
|
|
||||||
|
#### Networking changes
|
||||||
|
- **Request Handler**
|
||||||
|
- curl_cffi: [Support `curl_cffi` 0.11.x, 0.12.x, 0.13.x](https://github.com/yt-dlp/yt-dlp/commit/e98695549e2eb8ce4a59abe16b5afa8adc075bbe) ([#13989](https://github.com/yt-dlp/yt-dlp/issues/13989)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- requests: [Bump minimum required version of urllib3 to 2.0.2](https://github.com/yt-dlp/yt-dlp/commit/8175f3738fe4db3bc629d36bb72b927d4286d3f9) ([#13939](https://github.com/yt-dlp/yt-dlp/issues/13939)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **build**: [Use `macos-14` runner for `macos` builds](https://github.com/yt-dlp/yt-dlp/commit/66aa21dc5a3b79059c38f3ad1d05dc9b29187701) ([#13814](https://github.com/yt-dlp/yt-dlp/issues/13814)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **ci**: [Bump supported PyPy version to 3.11](https://github.com/yt-dlp/yt-dlp/commit/62e2a9c0d55306906f18da2927e05e1cbc31473c) ([#13877](https://github.com/yt-dlp/yt-dlp/issues/13877)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cleanup**
|
||||||
|
- [Move embed tests to dedicated extractors](https://github.com/yt-dlp/yt-dlp/commit/1c6068af997cfc0e28061fc00f4d6091e1de57da) ([#13782](https://github.com/yt-dlp/yt-dlp/issues/13782)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- Miscellaneous: [5e4ceb3](https://github.com/yt-dlp/yt-dlp/commit/5e4ceb35cf997af0dbf100e1de37f4e2bcbaa0b7) by [bashonly](https://github.com/bashonly), [injust](https://github.com/injust), [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
### 2025.07.21
|
||||||
|
|
||||||
|
#### Important changes
|
||||||
|
- **Default behaviour changed from `--mtime` to `--no-mtime`**
|
||||||
|
yt-dlp no longer applies the server modified time to downloaded files by default. [Read more](https://github.com/yt-dlp/yt-dlp/issues/12780)
|
||||||
|
- Security: [[CVE-2025-54072](https://nvd.nist.gov/vuln/detail/CVE-2025-54072)] [Fix `--exec` placeholder expansion on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-45hg-7f49-5h56)
|
||||||
|
- When `--exec` is used on Windows, the filepath expanded from `{}` (or the default placeholder) is now properly escaped
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- [Allow extractors to designate formats/subtitles for impersonation](https://github.com/yt-dlp/yt-dlp/commit/32809eb2da92c649e540a5b714f6235036026161) ([#13778](https://github.com/yt-dlp/yt-dlp/issues/13778)) by [bashonly](https://github.com/bashonly) (With fixes in [3e49bc8](https://github.com/yt-dlp/yt-dlp/commit/3e49bc8a1bdb4109b857f2c361c358e86fa63405), [2ac3eb9](https://github.com/yt-dlp/yt-dlp/commit/2ac3eb98373d1c31341c5e918c83872c7ff409c6))
|
||||||
|
- [Don't let format testing alter the return code](https://github.com/yt-dlp/yt-dlp/commit/4919051e447c7f8ae9df8ba5c4208b6b5c04915a) ([#13767](https://github.com/yt-dlp/yt-dlp/issues/13767)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Fix `--exec` placeholder expansion on Windows](https://github.com/yt-dlp/yt-dlp/commit/959ac99e98c3215437e573c22d64be42d361e863) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
- [No longer enable `--mtime` by default](https://github.com/yt-dlp/yt-dlp/commit/f3008bc5f89d2691f2f8dfc51b406ef4e25281c3) ([#12781](https://github.com/yt-dlp/yt-dlp/issues/12781)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- [Warn when skipping formats](https://github.com/yt-dlp/yt-dlp/commit/1f27a9f8baccb9105f2476154557540efe09a937) ([#13090](https://github.com/yt-dlp/yt-dlp/issues/13090)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **jsinterp**
|
||||||
|
- [Cache undefined variable names](https://github.com/yt-dlp/yt-dlp/commit/b342d27f3f82d913976509ddf5bff539ad8567ec) ([#13639](https://github.com/yt-dlp/yt-dlp/issues/13639)) by [bashonly](https://github.com/bashonly) (With fixes in [805519b](https://github.com/yt-dlp/yt-dlp/commit/805519bfaa7cb5443912dfe45ac774834ba65a16))
|
||||||
|
- [Fix variable scoping](https://github.com/yt-dlp/yt-dlp/commit/b6328ca05030d815222b25d208cc59a964623bf9) ([#13639](https://github.com/yt-dlp/yt-dlp/issues/13639)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
|
||||||
|
- **utils**
|
||||||
|
- `mimetype2ext`: [Always parse `flac` from `audio/flac`](https://github.com/yt-dlp/yt-dlp/commit/b8abd255e454acbe0023cdb946f9eb461ced7eeb) ([#13748](https://github.com/yt-dlp/yt-dlp/issues/13748)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- `unified_timestamp`: [Return `int` values](https://github.com/yt-dlp/yt-dlp/commit/6be26626f7cfa71d28e0fac2861eb04758810c5d) ([#13796](https://github.com/yt-dlp/yt-dlp/issues/13796)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- `urlhandle_detect_ext`: [Use `x-amz-meta-file-type` headers](https://github.com/yt-dlp/yt-dlp/commit/28bf46b7dafe2e241137763bf570a2f91ba8a53a) ([#13749](https://github.com/yt-dlp/yt-dlp/issues/13749)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- [Add `_search_nextjs_v13_data` helper](https://github.com/yt-dlp/yt-dlp/commit/5245231e4a39ecd5595d4337d46d85e150e2430a) ([#13398](https://github.com/yt-dlp/yt-dlp/issues/13398)) by [bashonly](https://github.com/bashonly) (With fixes in [b5fea53](https://github.com/yt-dlp/yt-dlp/commit/b5fea53f2099bed41ba1b17ab0ac87c8dba5a5ec))
|
||||||
|
- [Detect invalid m3u8 playlist data](https://github.com/yt-dlp/yt-dlp/commit/e99c0b838a9c5feb40c0dcd291bd7b8620b8d36d) ([#13601](https://github.com/yt-dlp/yt-dlp/issues/13601)) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
- **10play**: [Support new site domain](https://github.com/yt-dlp/yt-dlp/commit/790c286ce3e0b534ca2d8f6648ced220d888f139) ([#13611](https://github.com/yt-dlp/yt-dlp/issues/13611)) by [Georift](https://github.com/Georift)
|
||||||
|
- **9gag**: [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/0b359b184dee0c7052be482857bf562de67e4928) ([#13678](https://github.com/yt-dlp/yt-dlp/issues/13678)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **aenetworks**: [Support new URL formats](https://github.com/yt-dlp/yt-dlp/commit/5f951ce929b56a822514f1a02cc06af030855ec7) ([#13747](https://github.com/yt-dlp/yt-dlp/issues/13747)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **archive.org**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/d42a6ff0c4ca8893d722ff4e0c109aecbf4cc7cf) ([#13706](https://github.com/yt-dlp/yt-dlp/issues/13706)) by [rdamas](https://github.com/rdamas)
|
||||||
|
- **bandaichannel**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/23e9389f936ec5236a87815b8576e5ce567b2f77) ([#13152](https://github.com/yt-dlp/yt-dlp/issues/13152)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **bandcamp**: [Extract tags](https://github.com/yt-dlp/yt-dlp/commit/f9dff95cb1c138913011417b3bba020c0a691bba) ([#13480](https://github.com/yt-dlp/yt-dlp/issues/13480)) by [WouterGordts](https://github.com/WouterGordts)
|
||||||
|
- **bellmedia**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/6fb3947c0dc6d0e3eab5077c5bada8402f47a277) ([#13429](https://github.com/yt-dlp/yt-dlp/issues/13429)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **bilibili**: [Pass newer user-agent with API requests](https://github.com/yt-dlp/yt-dlp/commit/d3edc5d52a7159eda2331dbc7e14bf40a6585c81) ([#13736](https://github.com/yt-dlp/yt-dlp/issues/13736)) by [c-basalt](https://github.com/c-basalt)
|
||||||
|
- **bilibilibangumi**
|
||||||
|
- [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b15aa8d77257b86fa44c9a42a615dfe47ac5b3b7) ([#13800](https://github.com/yt-dlp/yt-dlp/issues/13800)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Fix geo-block detection](https://github.com/yt-dlp/yt-dlp/commit/884f35d54a64f1e6e7be49459842f573fc3a2701) ([#13667](https://github.com/yt-dlp/yt-dlp/issues/13667)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **blackboardcollaborate**: [Support subtitles and authwalled videos](https://github.com/yt-dlp/yt-dlp/commit/dcc4cba39e2a79d3efce16afa28dbe245468489f) ([#12473](https://github.com/yt-dlp/yt-dlp/issues/12473)) by [flanter21](https://github.com/flanter21)
|
||||||
|
- **btvplus**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/3ae61e0f313dd03a09060abc7a212775c3717818) ([#13541](https://github.com/yt-dlp/yt-dlp/issues/13541)) by [bubo](https://github.com/bubo)
|
||||||
|
- **ctv**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/9f54ea38984788811773ca2ceaca73864acf0e8a) ([#13429](https://github.com/yt-dlp/yt-dlp/issues/13429)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **dangalplay**: [Support other login regions](https://github.com/yt-dlp/yt-dlp/commit/09982bc33e2f1f9a1ff66e6738df44f15b36f6a6) ([#13768](https://github.com/yt-dlp/yt-dlp/issues/13768)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **francetv**: [Improve error handling](https://github.com/yt-dlp/yt-dlp/commit/ade876efb31d55d3394185ffc56942fdc8d325cc) ([#13726](https://github.com/yt-dlp/yt-dlp/issues/13726)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **hotstar**
|
||||||
|
- [Fix support for free accounts](https://github.com/yt-dlp/yt-dlp/commit/07d1d85f6387e4bdb107096f0131c7054f078bb9) ([#13700](https://github.com/yt-dlp/yt-dlp/issues/13700)) by [chauhantirth](https://github.com/chauhantirth)
|
||||||
|
- [Improve error handling](https://github.com/yt-dlp/yt-dlp/commit/7e0af2b1f0c3edb688603b022f3a9ca0bfdf75e9) ([#13727](https://github.com/yt-dlp/yt-dlp/issues/13727)) by [bashonly](https://github.com/bashonly) (With fixes in [ef103b2](https://github.com/yt-dlp/yt-dlp/commit/ef103b2d115bd0e880f9cfd2f7dd705f48e4b40d))
|
||||||
|
- **joqrag**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/6d39c420f7774562a106d90253e2ed5b75036321) ([#13152](https://github.com/yt-dlp/yt-dlp/issues/13152)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **limelight**: [Remove extractors](https://github.com/yt-dlp/yt-dlp/commit/5d693446e882931618c40c99bb593f0b87b30eb9) ([#13267](https://github.com/yt-dlp/yt-dlp/issues/13267)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **lrtradio**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b4b4486effdcb96bb6b8148171a49ff579b69a4a) ([#13717](https://github.com/yt-dlp/yt-dlp/issues/13717)) by [Pawka](https://github.com/Pawka)
|
||||||
|
- **mir24.tv**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/7b4c96e0898db048259ef5fdf12ed14e3605dce3) ([#13651](https://github.com/yt-dlp/yt-dlp/issues/13651)) by [swayll](https://github.com/swayll)
|
||||||
|
- **mixlr**: [Add extractors](https://github.com/yt-dlp/yt-dlp/commit/0f33950c778331bf4803c76e8b0ba1862df93431) ([#13561](https://github.com/yt-dlp/yt-dlp/issues/13561)) by [seproDev](https://github.com/seproDev), [ShockedPlot7560](https://github.com/ShockedPlot7560)
|
||||||
|
- **mlbtv**: [Make formats downloadable with ffmpeg](https://github.com/yt-dlp/yt-dlp/commit/87e3dc8c7f78929d2ef4f4a44e6a567e04cd8226) ([#13761](https://github.com/yt-dlp/yt-dlp/issues/13761)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **newspicks**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/2aaf1aa71d174700859c9ec1a81109b78e34961c) ([#13612](https://github.com/yt-dlp/yt-dlp/issues/13612)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **nhkradiru**: [Fix metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/7c49a937887756efcfa162abdcf17e48c244cb0c) ([#12708](https://github.com/yt-dlp/yt-dlp/issues/12708)) by [garret1317](https://github.com/garret1317)
|
||||||
|
- **noovo**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/d57a0b5aa78d59324b037d37492fe86aa4fbf58a) ([#13429](https://github.com/yt-dlp/yt-dlp/issues/13429)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **patreon**: campaign: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/d88b304d44c599d81acfa4231502270c8b9fe2f8) ([#13712](https://github.com/yt-dlp/yt-dlp/issues/13712)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **playerfm**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/1a8474c3ca6dbe51bb153b2b8eef7b9a61fa7dc3) ([#13016](https://github.com/yt-dlp/yt-dlp/issues/13016)) by [R0hanW](https://github.com/R0hanW)
|
||||||
|
- **rai**: [Fix formats extraction](https://github.com/yt-dlp/yt-dlp/commit/c8329fc572903eeed7edad1642773b2268b71a62) ([#13572](https://github.com/yt-dlp/yt-dlp/issues/13572)) by [moonshinerd](https://github.com/moonshinerd), [seproDev](https://github.com/seproDev)
|
||||||
|
- **raisudtirol**: [Support alternative domain](https://github.com/yt-dlp/yt-dlp/commit/85c3fa1925a9057ef4ae8af682686d5b3eb8e568) ([#13718](https://github.com/yt-dlp/yt-dlp/issues/13718)) by [barsnick](https://github.com/barsnick)
|
||||||
|
- **skeb**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/060c6a4501a0b8a92f1b9c12788f556d902c83c6) ([#13593](https://github.com/yt-dlp/yt-dlp/issues/13593)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **soundcloud**: [Always extract original format extension](https://github.com/yt-dlp/yt-dlp/commit/c1ac543c8166ff031d62e340b3244ca8556e3fb9) ([#13746](https://github.com/yt-dlp/yt-dlp/issues/13746)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **sproutvideo**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/0b41746964e1d0470ac286ce09408940a3a51147) ([#13610](https://github.com/yt-dlp/yt-dlp/issues/13610)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **thehighwire**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/3a84be9d1660ef798ea28f929a20391bef6afda4) ([#13505](https://github.com/yt-dlp/yt-dlp/issues/13505)) by [swayll](https://github.com/swayll)
|
||||||
|
- **twitch**: [Improve error handling](https://github.com/yt-dlp/yt-dlp/commit/422cc8cb2ff2bd3b4c2bc64e23507b7e6f522c35) ([#13618](https://github.com/yt-dlp/yt-dlp/issues/13618)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **unitednationswebtv**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/630f3389c33f0f7f6ec97e8917d20aeb4e4078da) ([#13538](https://github.com/yt-dlp/yt-dlp/issues/13538)) by [averageFOSSenjoyer](https://github.com/averageFOSSenjoyer)
|
||||||
|
- **vimeo**
|
||||||
|
- [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a5d697f62d8be78ffd472acb2f52c8bc32833003) ([#13692](https://github.com/yt-dlp/yt-dlp/issues/13692)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Handle age-restricted videos](https://github.com/yt-dlp/yt-dlp/commit/a6db1d297ab40cc346de24aacbeab93112b2f4e1) ([#13719](https://github.com/yt-dlp/yt-dlp/issues/13719)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **youtube**
|
||||||
|
- [Do not require PO Token for premium accounts](https://github.com/yt-dlp/yt-dlp/commit/5b57b72c1a7c6bd249ffcebdf5630761ec664c10) ([#13640](https://github.com/yt-dlp/yt-dlp/issues/13640)) by [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Ensure context params are consistent for web clients](https://github.com/yt-dlp/yt-dlp/commit/6e5bee418bc108565108153fd745c8e7a59f16dd) ([#13701](https://github.com/yt-dlp/yt-dlp/issues/13701)) by [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Extract global nsig helper functions](https://github.com/yt-dlp/yt-dlp/commit/fca94ac5d63ed6578b5cd9c8129d97a8a713c39a) ([#13639](https://github.com/yt-dlp/yt-dlp/issues/13639)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
|
||||||
|
- [Fix subtitles extraction](https://github.com/yt-dlp/yt-dlp/commit/0e68332bcb9fba87c42805b7a051eeb2bed36206) ([#13659](https://github.com/yt-dlp/yt-dlp/issues/13659)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Log bad playability statuses of player responses](https://github.com/yt-dlp/yt-dlp/commit/aa9f1f4d577e99897ac16cd19d4e217d688ea75d) ([#13647](https://github.com/yt-dlp/yt-dlp/issues/13647)) by [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Use impersonation for downloading subtitles](https://github.com/yt-dlp/yt-dlp/commit/8820101aa3152e5f4811541c645f8b5de231ba8c) ([#13786](https://github.com/yt-dlp/yt-dlp/issues/13786)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- tab: [Fix subscriptions feed extraction](https://github.com/yt-dlp/yt-dlp/commit/c23d837b6524d1e7a4595948871ba1708cba4dfa) ([#13665](https://github.com/yt-dlp/yt-dlp/issues/13665)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Downloader changes
|
||||||
|
- **hls**: [Do not fall back to ffmpeg when native is required](https://github.com/yt-dlp/yt-dlp/commit/a7113722ec33f30fc898caee9242af2b82188a53) ([#13655](https://github.com/yt-dlp/yt-dlp/issues/13655)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Networking changes
|
||||||
|
- **Request Handler**
|
||||||
|
- requests
|
||||||
|
- [Refactor default headers](https://github.com/yt-dlp/yt-dlp/commit/a4561c7a66c39d88efe7ae51e7fa1986faf093fb) ([#13785](https://github.com/yt-dlp/yt-dlp/issues/13785)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Work around partial read dropping data](https://github.com/yt-dlp/yt-dlp/commit/c2ff2dbaec7929015373fe002e9bd4849931a4ce) ([#13599](https://github.com/yt-dlp/yt-dlp/issues/13599)) by [Grub4K](https://github.com/Grub4K) (With fixes in [c316416](https://github.com/yt-dlp/yt-dlp/commit/c316416b972d1b05e58fbcc21e80428b900ce102))
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **cleanup**
|
||||||
|
- [Bump ruff to 0.12.x](https://github.com/yt-dlp/yt-dlp/commit/ca5cce5b07d51efe7310b449cdefeca8d873e9df) ([#13596](https://github.com/yt-dlp/yt-dlp/issues/13596)) by [seproDev](https://github.com/seproDev)
|
||||||
|
- Miscellaneous: [9951fdd](https://github.com/yt-dlp/yt-dlp/commit/9951fdd0d08b655cb1af8cd7f32a3fb7e2b1324e) by [adamralph](https://github.com/adamralph), [bashonly](https://github.com/bashonly), [doe1080](https://github.com/doe1080), [hseg](https://github.com/hseg), [InvalidUsernameException](https://github.com/InvalidUsernameException), [seproDev](https://github.com/seproDev)
|
||||||
|
- **devscripts**: [Fix filename/directory Bash completions](https://github.com/yt-dlp/yt-dlp/commit/99093e96fd6a26dea9d6e4bd1e4b16283b6ad1ee) ([#13620](https://github.com/yt-dlp/yt-dlp/issues/13620)) by [barsnick](https://github.com/barsnick)
|
||||||
|
- **test**: download: [Support `playlist_maxcount`](https://github.com/yt-dlp/yt-dlp/commit/fd36b8f31bafbd8096bdb92a446a0c9c6081209c) ([#13433](https://github.com/yt-dlp/yt-dlp/issues/13433)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
|
||||||
|
|
||||||
|
### 2025.06.30
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- **jsinterp**: [Fix `extract_object`](https://github.com/yt-dlp/yt-dlp/commit/958153a226214c86879e36211ac191bf78289578) ([#13580](https://github.com/yt-dlp/yt-dlp/issues/13580)) by [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- **bilibilispacevideo**: [Extract hidden-mode collections as playlists](https://github.com/yt-dlp/yt-dlp/commit/99b85ac102047446e6adf5b62bfc3c8d80b53778) ([#13533](https://github.com/yt-dlp/yt-dlp/issues/13533)) by [c-basalt](https://github.com/c-basalt)
|
||||||
|
- **hotstar**
|
||||||
|
- [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b5bd057fe86550f3aa67f2fc8790d1c6a251c57b) ([#13530](https://github.com/yt-dlp/yt-dlp/issues/13530)) by [bashonly](https://github.com/bashonly), [chauhantirth](https://github.com/chauhantirth) (With fixes in [e9f1576](https://github.com/yt-dlp/yt-dlp/commit/e9f157669e24953a88d15ce22053649db7a8e81e) by [bashonly](https://github.com/bashonly))
|
||||||
|
- [Fix metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/0a6b1044899f452cd10b6c7a6b00fa985a9a8b97) ([#13560](https://github.com/yt-dlp/yt-dlp/issues/13560)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Raise for login required](https://github.com/yt-dlp/yt-dlp/commit/5e292baad62c749b6c340621ab2d0f904165ddfb) ([#10405](https://github.com/yt-dlp/yt-dlp/issues/10405)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- series: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/4bd9a7ade7e0508b9795b3e72a69eeb40788b62b) ([#13564](https://github.com/yt-dlp/yt-dlp/issues/13564)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **jiocinema**: [Remove extractors](https://github.com/yt-dlp/yt-dlp/commit/7e2504f941a11ea2b0dba00de3f0295cdc253e79) ([#13565](https://github.com/yt-dlp/yt-dlp/issues/13565)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **kick**: [Support subscriber-only content](https://github.com/yt-dlp/yt-dlp/commit/b16722ede83377f77ea8352dcd0a6ca8e83b8f0f) ([#13550](https://github.com/yt-dlp/yt-dlp/issues/13550)) by [helpimnotdrowning](https://github.com/helpimnotdrowning)
|
||||||
|
- **niconico**: live: [Fix extractor and downloader](https://github.com/yt-dlp/yt-dlp/commit/06c1a8cdffe14050206683253726875144192ef5) ([#13158](https://github.com/yt-dlp/yt-dlp/issues/13158)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **sauceplus**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/35fc33fbc51c7f5392fb2300f65abf6cf107ef90) ([#13567](https://github.com/yt-dlp/yt-dlp/issues/13567)) by [bashonly](https://github.com/bashonly), [ceandreasen](https://github.com/ceandreasen)
|
||||||
|
- **sproutvideo**: [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/11b9416e10cff7513167d76d6c47774fcdd3e26a) ([#13589](https://github.com/yt-dlp/yt-dlp/issues/13589)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **youtube**: [Fix premium formats extraction](https://github.com/yt-dlp/yt-dlp/commit/2ba5391cd68ed4f2415c827d2cecbcbc75ace10b) ([#13586](https://github.com/yt-dlp/yt-dlp/issues/13586)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **ci**: [Add signature tests](https://github.com/yt-dlp/yt-dlp/commit/1b883846347addeab12663fd74317fd544341a1c) ([#13582](https://github.com/yt-dlp/yt-dlp/issues/13582)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cleanup**: Miscellaneous: [b018784](https://github.com/yt-dlp/yt-dlp/commit/b0187844988e557c7e1e6bb1aabd4c1176768d86) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
### 2025.06.25
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- [Add `_search_nuxt_json` helper](https://github.com/yt-dlp/yt-dlp/commit/51887484e46ab6015c041cb1ab626a55f25a03bd) ([#13386](https://github.com/yt-dlp/yt-dlp/issues/13386)) by [bashonly](https://github.com/bashonly), [Grub4K](https://github.com/Grub4K)
|
||||||
|
- **brightcove**: new: [Improve metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/e6bd4a3da295b760ab20b39c18ce8934d312c2bf) ([#13461](https://github.com/yt-dlp/yt-dlp/issues/13461)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **huya**: live: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/2600849badb0d08c55b58dcc77a13af6ba423da6) ([#13520](https://github.com/yt-dlp/yt-dlp/issues/13520)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **hypergryph**: [Improve metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/1722c55400ff30bb5aee5dd7a262f0b7e9ce2f0e) ([#13415](https://github.com/yt-dlp/yt-dlp/issues/13415)) by [doe1080](https://github.com/doe1080), [eason1478](https://github.com/eason1478)
|
||||||
|
- **lsm**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/c57412d1f9cf0124adc972a47858ac42b740c61d) ([#13126](https://github.com/yt-dlp/yt-dlp/issues/13126)) by [Caesim404](https://github.com/Caesim404)
|
||||||
|
- **mave**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/1838a1ce5d4ade80770ba9162eaffc9a1607dc70) ([#13380](https://github.com/yt-dlp/yt-dlp/issues/13380)) by [anlar](https://github.com/anlar)
|
||||||
|
- **sportdeutschland**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a4ce4327c9836691d3b6b00e44a90b6741601ed8) ([#13519](https://github.com/yt-dlp/yt-dlp/issues/13519)) by [DTrombett](https://github.com/DTrombett)
|
||||||
|
- **sproutvideo**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/5b559d0072b7164daf06bacdc41c6f11283452c8) ([#13544](https://github.com/yt-dlp/yt-dlp/issues/13544)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **tv8.it**: [Support slugless URLs](https://github.com/yt-dlp/yt-dlp/commit/3bd30291601c47fa4a257983473884103ecab0c7) ([#13478](https://github.com/yt-dlp/yt-dlp/issues/13478)) by [DTrombett](https://github.com/DTrombett)
|
||||||
|
- **youtube**
|
||||||
|
- [Check any `ios` m3u8 formats prior to download](https://github.com/yt-dlp/yt-dlp/commit/8f94b76cbf7bbd9dfd8762c63cdea04f90f1297f) ([#13524](https://github.com/yt-dlp/yt-dlp/issues/13524)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Improve player context payloads](https://github.com/yt-dlp/yt-dlp/commit/ff6f94041aeee19c5559e1c1cd693960a1c1dd14) ([#13539](https://github.com/yt-dlp/yt-dlp/issues/13539)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **test**: `traversal`: [Fix morsel tests for Python 3.14](https://github.com/yt-dlp/yt-dlp/commit/73bf10211668e4a59ccafd790e06ee82d9fea9ea) ([#13471](https://github.com/yt-dlp/yt-dlp/issues/13471)) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
|
||||||
|
### 2025.06.09
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- [Improve JSON LD thumbnails extraction](https://github.com/yt-dlp/yt-dlp/commit/85c8a405e3651dc041b758f4744d4fb3c4c55e01) ([#13368](https://github.com/yt-dlp/yt-dlp/issues/13368)) by [bashonly](https://github.com/bashonly), [doe1080](https://github.com/doe1080)
|
||||||
|
- **10play**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/6d265388c6e943419ac99e9151cf75a3265f980f) ([#13349](https://github.com/yt-dlp/yt-dlp/issues/13349)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **adobepass**
|
||||||
|
- [Add Fubo MSO](https://github.com/yt-dlp/yt-dlp/commit/eee90acc47d7f8de24afaa8b0271ccaefdf6e88c) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [maxbin123](https://github.com/maxbin123)
|
||||||
|
- [Always add newer user-agent when required](https://github.com/yt-dlp/yt-dlp/commit/0ee1102268cf31b07f8a8318a47424c66b2f7378) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Fix Philo MSO authentication](https://github.com/yt-dlp/yt-dlp/commit/943083edcd3df45aaa597a6967bc6c95b720f54c) ([#13335](https://github.com/yt-dlp/yt-dlp/issues/13335)) by [Sipherdrakon](https://github.com/Sipherdrakon)
|
||||||
|
- [Rework to require software statement](https://github.com/yt-dlp/yt-dlp/commit/711c5d5d098fee2992a1a624b1c4b30364b91426) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly), [maxbin123](https://github.com/maxbin123)
|
||||||
|
- [Validate login URL before sending credentials](https://github.com/yt-dlp/yt-dlp/commit/89c1b349ad81318d9d3bea76c01c891696e58d38) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **aenetworks**
|
||||||
|
- [Fix playlist extractors](https://github.com/yt-dlp/yt-dlp/commit/f37d599a697e82fe68b423865897d55bae34f373) ([#13408](https://github.com/yt-dlp/yt-dlp/issues/13408)) by [Sipherdrakon](https://github.com/Sipherdrakon)
|
||||||
|
- [Fix provider-locked content extraction](https://github.com/yt-dlp/yt-dlp/commit/6693d6603358ae6beca834dbd822a7917498b813) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [maxbin123](https://github.com/maxbin123)
|
||||||
|
- **bilibilibangumi**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/13e55162719528d42d2133e16b65ff59a667a6e4) ([#13416](https://github.com/yt-dlp/yt-dlp/issues/13416)) by [c-basalt](https://github.com/c-basalt)
|
||||||
|
- **brightcove**: new: [Adapt to new AdobePass requirement](https://github.com/yt-dlp/yt-dlp/commit/98f8eec956e3b16cb66a3d49cc71af3807db795e) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cu.ntv.co.jp**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/aa863ddab9b1d104678e9cf39bb76f5b14fca660) ([#13302](https://github.com/yt-dlp/yt-dlp/issues/13302)) by [doe1080](https://github.com/doe1080), [nullpos](https://github.com/nullpos)
|
||||||
|
- **go**: [Fix provider-locked content extraction](https://github.com/yt-dlp/yt-dlp/commit/2e5bf002dad16f5ce35aa2023d392c9e518fcd8f) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly), [maxbin123](https://github.com/maxbin123)
|
||||||
|
- **nbc**: [Rework and adapt extractors to new AdobePass flow](https://github.com/yt-dlp/yt-dlp/commit/2d7949d5642bc37d1e71bf00c9a55260e5505d58) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **nobelprize**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/97ddfefeb4faba6e61cd80996c16952b8eab16f3) ([#13205](https://github.com/yt-dlp/yt-dlp/issues/13205)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **odnoklassniki**: [Detect and raise when login is required](https://github.com/yt-dlp/yt-dlp/commit/148a1eb4c59e127965396c7a6e6acf1979de459e) ([#13361](https://github.com/yt-dlp/yt-dlp/issues/13361)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **patreon**: [Fix m3u8 formats extraction](https://github.com/yt-dlp/yt-dlp/commit/e0d6c0822930f6e63f574d46d946a58b73ecd10c) ([#13266](https://github.com/yt-dlp/yt-dlp/issues/13266)) by [bashonly](https://github.com/bashonly) (With fixes in [1a8a03e](https://github.com/yt-dlp/yt-dlp/commit/1a8a03ea8d827107319a18076ee3505090667c5a))
|
||||||
|
- **podchaser**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/538eb305673c26bff6a2b12f1c96375fe02ce41a) ([#13271](https://github.com/yt-dlp/yt-dlp/issues/13271)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **sr**: mediathek: [Improve metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/e3c605a61f4cc2de9059f37434fa108c3c20f58e) ([#13294](https://github.com/yt-dlp/yt-dlp/issues/13294)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **stacommu**: [Avoid partial stream formats](https://github.com/yt-dlp/yt-dlp/commit/5d96527be80dc1ed1702d9cd548ff86de570ad70) ([#13412](https://github.com/yt-dlp/yt-dlp/issues/13412)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **startrek**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a8bf0011bde92b3f1324a98bfbd38932fd3ebe18) ([#13188](https://github.com/yt-dlp/yt-dlp/issues/13188)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **svt**: play: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/e1b6062f8c4a3fa33c65269d48d09ec78de765a2) ([#13329](https://github.com/yt-dlp/yt-dlp/issues/13329)) by [barsnick](https://github.com/barsnick), [bashonly](https://github.com/bashonly)
|
||||||
|
- **telecinco**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/03dba2012d9bd3f402fa8c2f122afba89bbd22a4) ([#13379](https://github.com/yt-dlp/yt-dlp/issues/13379)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **theplatform**: [Improve metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/ed108b3ea481c6a4b5215a9302ba92d74baa2425) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **toutiao**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/f8051e3a61686c5db1de5f5746366ecfbc3ad20c) ([#13246](https://github.com/yt-dlp/yt-dlp/issues/13246)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **turner**: [Adapt extractors to new AdobePass flow](https://github.com/yt-dlp/yt-dlp/commit/0daddc780d3ac5bebc3a3ec5b884d9243cbc0745) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **twitcasting**: [Fix password-protected livestream support](https://github.com/yt-dlp/yt-dlp/commit/52f9729c9a92ad4656d746ff0b1acecb87b3e96d) ([#13097](https://github.com/yt-dlp/yt-dlp/issues/13097)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **twitter**: broadcast: [Support events URLs](https://github.com/yt-dlp/yt-dlp/commit/7794374de8afb20499b023107e2abfd4e6b93ee4) ([#13248](https://github.com/yt-dlp/yt-dlp/issues/13248)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **umg**: de: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/4e7c1ea346b510280218b47e8653dbbca3a69870) ([#13373](https://github.com/yt-dlp/yt-dlp/issues/13373)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **vice**: [Mark extractors as broken](https://github.com/yt-dlp/yt-dlp/commit/6121559e027a04574690799c1776bc42bb51af31) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **vimeo**: [Extract subtitles from player subdomain](https://github.com/yt-dlp/yt-dlp/commit/c723c4e5e78263df178dbe69844a3d05f3ef9e35) ([#13350](https://github.com/yt-dlp/yt-dlp/issues/13350)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **watchespn**: [Fix provider-locked content extraction](https://github.com/yt-dlp/yt-dlp/commit/b094747e93cfb0a2c53007120e37d0d84d41f030) ([#13131](https://github.com/yt-dlp/yt-dlp/issues/13131)) by [maxbin123](https://github.com/maxbin123)
|
||||||
|
- **weverse**: [Support login with oauth refresh tokens](https://github.com/yt-dlp/yt-dlp/commit/3fe72e9eea38d9a58211cde42cfaa577ce020e2c) ([#13284](https://github.com/yt-dlp/yt-dlp/issues/13284)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **youtube**
|
||||||
|
- [Add `tv_simply` player client](https://github.com/yt-dlp/yt-dlp/commit/1fd0e88b67db53ad163393d6965f68e908fa70e3) ([#13389](https://github.com/yt-dlp/yt-dlp/issues/13389)) by [gamer191](https://github.com/gamer191)
|
||||||
|
- [Extract srt subtitles](https://github.com/yt-dlp/yt-dlp/commit/231349786e8c42089c2e079ec94c0ea866c37999) ([#13411](https://github.com/yt-dlp/yt-dlp/issues/13411)) by [gamer191](https://github.com/gamer191)
|
||||||
|
- [Fix `--mark-watched` support](https://github.com/yt-dlp/yt-dlp/commit/b5be29fa58ec98226e11621fd9c58585bcff6879) ([#13222](https://github.com/yt-dlp/yt-dlp/issues/13222)) by [brian6932](https://github.com/brian6932), [iednod55](https://github.com/iednod55)
|
||||||
|
- [Fix automatic captions for some client combinations](https://github.com/yt-dlp/yt-dlp/commit/53ea743a9c158f8ca2d75a09ca44ba68606042d8) ([#13268](https://github.com/yt-dlp/yt-dlp/issues/13268)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Improve signature extraction debug output](https://github.com/yt-dlp/yt-dlp/commit/d30a49742cfa22e61c47df4ac0e7334d648fb85d) ([#13327](https://github.com/yt-dlp/yt-dlp/issues/13327)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Rework nsig function name extraction](https://github.com/yt-dlp/yt-dlp/commit/9e38b273b7ac942e7e9fc05a651ed810ab7d30ba) ([#13403](https://github.com/yt-dlp/yt-dlp/issues/13403)) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
- [nsig code improvements and cleanup](https://github.com/yt-dlp/yt-dlp/commit/f7bbf5a617f9ab54ef51eaef99be36e175b5e9c3) ([#13280](https://github.com/yt-dlp/yt-dlp/issues/13280)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **zdf**: [Fix language extraction and format sorting](https://github.com/yt-dlp/yt-dlp/commit/db162b76f6bdece50babe2e0cacfe56888c2e125) ([#13313](https://github.com/yt-dlp/yt-dlp/issues/13313)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **build**
|
||||||
|
- [Exclude `pkg_resources` from being collected](https://github.com/yt-dlp/yt-dlp/commit/cc749a8a3b8b6e5c05318868c72a403f376a1b38) ([#13320](https://github.com/yt-dlp/yt-dlp/issues/13320)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Fix macOS requirements caching](https://github.com/yt-dlp/yt-dlp/commit/201812100f315c6727a4418698d5b4e8a79863d4) ([#13328](https://github.com/yt-dlp/yt-dlp/issues/13328)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cleanup**: Miscellaneous: [339614a](https://github.com/yt-dlp/yt-dlp/commit/339614a173c74b42d63e858c446a9cae262a13af) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **test**: postprocessors: [Remove binary thumbnail test data](https://github.com/yt-dlp/yt-dlp/commit/a9b370069838e84d44ac7ad095d657003665885a) ([#13341](https://github.com/yt-dlp/yt-dlp/issues/13341)) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
|
### 2025.05.22
|
||||||
|
|
||||||
|
#### Core changes
|
||||||
|
- **cookies**: [Fix Linux desktop environment detection](https://github.com/yt-dlp/yt-dlp/commit/e491fd4d090db3af52a82863fb0553dd5e17fb85) ([#13197](https://github.com/yt-dlp/yt-dlp/issues/13197)) by [mbway](https://github.com/mbway)
|
||||||
|
- **jsinterp**: [Fix increment/decrement evaluation](https://github.com/yt-dlp/yt-dlp/commit/167d7a9f0ffd1b4fe600193441bdb7358db2740b) ([#13238](https://github.com/yt-dlp/yt-dlp/issues/13238)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- **1tv**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/41c0a1fb89628696f8bb88e2b9f3a68f355b8c26) ([#13168](https://github.com/yt-dlp/yt-dlp/issues/13168)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **amcnetworks**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/464c84fedf78eef822a431361155f108b5df96d7) ([#13147](https://github.com/yt-dlp/yt-dlp/issues/13147)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **bitchute**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/1d0f6539c47e5d5c68c3c47cdb7075339e2885ac) ([#13081](https://github.com/yt-dlp/yt-dlp/issues/13081)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cartoonnetwork**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/7dbb47f84f0ee1266a3a01f58c9bc4c76d76794a) ([#13148](https://github.com/yt-dlp/yt-dlp/issues/13148)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **iprima**: [Fix login support](https://github.com/yt-dlp/yt-dlp/commit/a7d9a5eb79ceeecb851389f3f2c88597871ca3f2) ([#12937](https://github.com/yt-dlp/yt-dlp/issues/12937)) by [baierjan](https://github.com/baierjan)
|
||||||
|
- **jiosaavn**
|
||||||
|
- artist: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/586b557b124f954d3f625360ebe970989022ad97) ([#12803](https://github.com/yt-dlp/yt-dlp/issues/12803)) by [subrat-lima](https://github.com/subrat-lima)
|
||||||
|
- playlist, show: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/317f4b8006c2c0f0f64f095b1485163ad97c9053) ([#12803](https://github.com/yt-dlp/yt-dlp/issues/12803)) by [subrat-lima](https://github.com/subrat-lima)
|
||||||
|
- show: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/6839276496d8814cf16f58b637e45663467928e6) ([#12803](https://github.com/yt-dlp/yt-dlp/issues/12803)) by [subrat-lima](https://github.com/subrat-lima)
|
||||||
|
- **lrtradio**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/abf58dcd6a09e14eec4ea82ae12f79a0337cb383) ([#13200](https://github.com/yt-dlp/yt-dlp/issues/13200)) by [Pawka](https://github.com/Pawka)
|
||||||
|
- **nebula**: [Support `--mark-watched`](https://github.com/yt-dlp/yt-dlp/commit/20f288bdc2173c7cc58d709d25ca193c1f6001e7) ([#13120](https://github.com/yt-dlp/yt-dlp/issues/13120)) by [GeoffreyFrogeye](https://github.com/GeoffreyFrogeye)
|
||||||
|
- **niconico**
|
||||||
|
- [Fix error handling](https://github.com/yt-dlp/yt-dlp/commit/f569be4602c2a857087e495d5d7ed6060cd97abe) ([#13236](https://github.com/yt-dlp/yt-dlp/issues/13236)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- live: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/7a7b85c9014d96421e18aa7ea5f4c1bee5ceece0) ([#13045](https://github.com/yt-dlp/yt-dlp/issues/13045)) by [doe1080](https://github.com/doe1080)
|
||||||
|
- **nytimesarticle**: [Fix extraction](https://github.com/yt-dlp/yt-dlp/commit/b26bc32579c00ef579d75a835807ccc87d20ee0a) ([#13104](https://github.com/yt-dlp/yt-dlp/issues/13104)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **once**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/f475e8b529d18efdad603ffda02a56e707fe0e2c) ([#13164](https://github.com/yt-dlp/yt-dlp/issues/13164)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **picarto**: vod: [Support `/profile/` video URLs](https://github.com/yt-dlp/yt-dlp/commit/31e090cb787f3504ec25485adff9a2a51d056734) ([#13227](https://github.com/yt-dlp/yt-dlp/issues/13227)) by [subrat-lima](https://github.com/subrat-lima)
|
||||||
|
- **playsuisse**: [Improve metadata extraction](https://github.com/yt-dlp/yt-dlp/commit/d880e060803ae8ed5a047e578cca01e1f0e630ce) ([#12466](https://github.com/yt-dlp/yt-dlp/issues/12466)) by [v3DJG6GL](https://github.com/v3DJG6GL)
|
||||||
|
- **sprout**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/cbcfe6378dde33a650e3852ab17ad4503b8e008d) ([#13149](https://github.com/yt-dlp/yt-dlp/issues/13149)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **svtpage**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/ea8498ed534642dd7e925961b97b934987142fd3) ([#12957](https://github.com/yt-dlp/yt-dlp/issues/12957)) by [diman8](https://github.com/diman8)
|
||||||
|
- **twitch**: [Support `--live-from-start`](https://github.com/yt-dlp/yt-dlp/commit/00b1bec55249cf2ad6271d36492c51b34b6459d1) ([#13202](https://github.com/yt-dlp/yt-dlp/issues/13202)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **vimeo**: event: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/545c1a5b6f2fe88722b41aef0e7485bf3be3f3f9) ([#13216](https://github.com/yt-dlp/yt-dlp/issues/13216)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **wat.tv**: [Improve error handling](https://github.com/yt-dlp/yt-dlp/commit/f123cc83b3aea45053f5fa1d9141048b01fc2774) ([#13111](https://github.com/yt-dlp/yt-dlp/issues/13111)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **weverse**: [Fix live extraction](https://github.com/yt-dlp/yt-dlp/commit/5328eda8820cc5f21dcf917684d23fbdca41831d) ([#13084](https://github.com/yt-dlp/yt-dlp/issues/13084)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **xinpianchang**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/83fabf352489d52843f67e6e9cc752db86d27e6e) ([#13245](https://github.com/yt-dlp/yt-dlp/issues/13245)) by [garret1317](https://github.com/garret1317)
|
||||||
|
- **youtube**
|
||||||
|
- [Add PO token support for subtitles](https://github.com/yt-dlp/yt-dlp/commit/32ed5f107c6c641958d1cd2752e130de4db55a13) ([#13234](https://github.com/yt-dlp/yt-dlp/issues/13234)) by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Add `web_embedded` client for age-restricted videos](https://github.com/yt-dlp/yt-dlp/commit/0feec6dc131f488428bf881519e7c69766fbb9ae) ([#13089](https://github.com/yt-dlp/yt-dlp/issues/13089)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Add a PO Token Provider Framework](https://github.com/yt-dlp/yt-dlp/commit/2685654a37141cca63eda3a92da0e2706e23ccfd) ([#12840](https://github.com/yt-dlp/yt-dlp/issues/12840)) by [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Extract `media_type` for all videos](https://github.com/yt-dlp/yt-dlp/commit/ded11ebc9afba6ba33923375103e9be2d7c804e7) ([#13136](https://github.com/yt-dlp/yt-dlp/issues/13136)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Fix `--live-from-start` support for premieres](https://github.com/yt-dlp/yt-dlp/commit/8f303afb43395be360cafd7ad4ce2b6e2eedfb8a) ([#13079](https://github.com/yt-dlp/yt-dlp/issues/13079)) by [arabcoders](https://github.com/arabcoders)
|
||||||
|
- [Fix geo-restriction error handling](https://github.com/yt-dlp/yt-dlp/commit/c7e575e31608c19c5b26c10a4229db89db5fc9a8) ([#13217](https://github.com/yt-dlp/yt-dlp/issues/13217)) by [yozel](https://github.com/yozel)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- **build**
|
||||||
|
- [Bump PyInstaller to v6.13.0](https://github.com/yt-dlp/yt-dlp/commit/17cf9088d0d535e4a7feffbf02bd49cd9dae5ab9) ([#13082](https://github.com/yt-dlp/yt-dlp/issues/13082)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Bump run-on-arch-action to v3](https://github.com/yt-dlp/yt-dlp/commit/9064d2482d1fe722bbb4a49731fe0711c410d1c8) ([#13088](https://github.com/yt-dlp/yt-dlp/issues/13088)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- **cleanup**: Miscellaneous: [7977b32](https://github.com/yt-dlp/yt-dlp/commit/7977b329ed97b216e37bd402f4935f28c00eac9e) by [bashonly](https://github.com/bashonly)
|
||||||
|
|
||||||
### 2025.04.30
|
||||||
|
|
||||||
#### Important changes
|
||||||
|
|||||||
9
Makefile
9
Makefile
@@ -10,7 +10,7 @@ tar: yt-dlp.tar.gz
|
|||||||
# intended use: when building a source distribution,
|
# intended use: when building a source distribution,
|
||||||
# make pypi-files && python3 -m build -sn .
|
# make pypi-files && python3 -m build -sn .
|
||||||
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
||||||
completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
|
completions yt-dlp.1 pyproject.toml devscripts/* test/*
|
||||||
|
|
||||||
.PHONY: all clean clean-all clean-test clean-dist clean-cache \
|
.PHONY: all clean clean-all clean-test clean-dist clean-cache \
|
||||||
completions completion-bash completion-fish completion-zsh \
|
completions completion-bash completion-fish completion-zsh \
|
||||||
@@ -18,10 +18,11 @@ pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
|||||||
tar pypi-files lazy-extractors install uninstall
|
tar pypi-files lazy-extractors install uninstall
|
||||||
|
|
||||||
clean-test:
|
clean-test:
|
||||||
rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
rm -rf tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
||||||
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
||||||
*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.lrc *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 *.mp4 \
|
*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.lrc *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 *.mp4 \
|
||||||
*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
|
*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp \
|
||||||
|
test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp "test/testdata/thumbnails/foo %d bar/foo_%d."*
|
||||||
clean-dist:
|
clean-dist:
|
||||||
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
||||||
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
|
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
|
||||||
@@ -158,7 +159,7 @@ yt-dlp.tar.gz: all
|
|||||||
README.md supportedsites.md Changelog.md LICENSE \
|
README.md supportedsites.md Changelog.md LICENSE \
|
||||||
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
||||||
Makefile yt-dlp.1 README.txt completions .gitignore \
|
Makefile yt-dlp.1 README.txt completions .gitignore \
|
||||||
setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
|
yt-dlp yt_dlp pyproject.toml devscripts test
|
||||||
|
|
||||||
AUTHORS: Changelog.md
|
AUTHORS: Changelog.md
|
||||||
@if [ -d '.git' ] && command -v git > /dev/null ; then \
|
@if [ -d '.git' ] && command -v git > /dev/null ; then \
|
||||||
|
|||||||
131
README.md
131
README.md
@@ -44,6 +44,7 @@
|
|||||||
* [Post-processing Options](#post-processing-options)
|
* [Post-processing Options](#post-processing-options)
|
||||||
* [SponsorBlock Options](#sponsorblock-options)
|
* [SponsorBlock Options](#sponsorblock-options)
|
||||||
* [Extractor Options](#extractor-options)
|
* [Extractor Options](#extractor-options)
|
||||||
|
* [Preset Aliases](#preset-aliases)
|
||||||
* [CONFIGURATION](#configuration)
|
* [CONFIGURATION](#configuration)
|
||||||
* [Configuration file encoding](#configuration-file-encoding)
|
* [Configuration file encoding](#configuration-file-encoding)
|
||||||
* [Authentication with netrc](#authentication-with-netrc)
|
* [Authentication with netrc](#authentication-with-netrc)
|
||||||
@@ -104,13 +105,21 @@ #### Alternatives
|
|||||||
|
|
||||||
File|Description
|
File|Description
|
||||||
:---|:---
|
:---|:---
|
||||||
|
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux (glibc 2.17+) standalone x86_64 binary
|
||||||
|
[yt-dlp_linux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux.zip)|Unpackaged Linux (glibc 2.17+) x86_64 executable (no auto-update)
|
||||||
|
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux (glibc 2.17+) standalone aarch64 binary
|
||||||
|
[yt-dlp_linux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64.zip)|Unpackaged Linux (glibc 2.17+) aarch64 executable (no auto-update)
|
||||||
|
[yt-dlp_linux_armv7l.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l.zip)|Unpackaged Linux (glibc 2.31+) armv7l executable (no auto-update)
|
||||||
|
[yt-dlp_musllinux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux)|Linux (musl 1.2+) standalone x86_64 binary
|
||||||
|
[yt-dlp_musllinux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux.zip)|Unpackaged Linux (musl 1.2+) x86_64 executable (no auto-update)
|
||||||
|
[yt-dlp_musllinux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64)|Linux (musl 1.2+) standalone aarch64 binary
|
||||||
|
[yt-dlp_musllinux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64.zip)|Unpackaged Linux (musl 1.2+) aarch64 executable (no auto-update)
|
||||||
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
|
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
|
||||||
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux standalone x64 binary
|
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 (32-bit) executable (no auto-update)
|
||||||
[yt-dlp_linux_armv7l](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l)|Linux standalone armv7l (32-bit) binary
|
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone ARM64 binary
|
||||||
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux standalone aarch64 (64-bit) binary
|
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) ARM64 executable (no auto-update)
|
||||||
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows executable (no auto-update)
|
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
|
||||||
[yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
|
[yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
|
||||||
[yt-dlp_macos_legacy](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos_legacy)|MacOS (10.9+) standalone x64 executable
|
|
||||||
|
|
||||||
#### Misc
|
#### Misc
|
||||||
|
|
||||||
@@ -129,6 +138,17 @@ #### Misc
|
|||||||
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
|
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
|
||||||
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
|
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Licensing
|
||||||
|
|
||||||
|
While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release files contain code from other projects with different licenses.
|
||||||
|
|
||||||
|
Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html).
|
||||||
|
|
||||||
|
See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for details.
|
||||||
|
|
||||||
|
The zipimport binary (`yt-dlp`), the source tarball (`yt-dlp.tar.gz`), and the PyPI source distribution & wheel only contain code licensed under the [Unlicense](LICENSE).
|
||||||
|
|
||||||
<!-- MANPAGE: END EXCLUDED SECTION -->
|
<!-- MANPAGE: END EXCLUDED SECTION -->
|
||||||
|
|
||||||
**Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
|
**Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
|
||||||
@@ -170,8 +190,11 @@ # To install nightly with pip:
|
|||||||
python3 -m pip install -U --pre "yt-dlp[default]"
|
python3 -m pip install -U --pre "yt-dlp[default]"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
When running a yt-dlp version that is older than 90 days, you will see a warning message suggesting to update to the latest version.
|
||||||
|
You can suppress this warning by adding `--no-update` to your command or configuration file.
|
||||||
|
|
||||||
## DEPENDENCIES
|
## DEPENDENCIES
|
||||||
Python versions 3.9+ (CPython) and 3.10+ (PyPy) are supported. Other versions and implementations may or may not work correctly.
|
Python versions 3.9+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.
|
||||||
|
|
||||||
<!-- Python 3.5+ uses VC++14 and it is already embedded in the binary created
|
<!-- Python 3.5+ uses VC++14 and it is already embedded in the binary created
|
||||||
<!x-- https://www.microsoft.com/en-us/download/details.aspx?id=26999 --x>
|
<!x-- https://www.microsoft.com/en-us/download/details.aspx?id=26999 --x>
|
||||||
@@ -200,14 +223,14 @@ #### Impersonation
|
|||||||
|
|
||||||
* [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
|
* [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
|
||||||
* Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
|
* Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
|
||||||
* Currently included in `yt-dlp.exe`, `yt-dlp_linux` and `yt-dlp_macos` builds
|
* Currently included in most builds *except* `yt-dlp` (Unix zipimport binary), `yt-dlp_x86` (Windows 32-bit) and `yt-dlp_musllinux_aarch64`
|
||||||
|
|
||||||
|
|
||||||
### Metadata
|
### Metadata
|
||||||
|
|
||||||
* [**mutagen**](https://github.com/quodlibet/mutagen)\* - For `--embed-thumbnail` in certain formats. Licensed under [GPLv2+](https://github.com/quodlibet/mutagen/blob/master/COPYING)
|
* [**mutagen**](https://github.com/quodlibet/mutagen)\* - For `--embed-thumbnail` in certain formats. Licensed under [GPLv2+](https://github.com/quodlibet/mutagen/blob/master/COPYING)
|
||||||
* [**AtomicParsley**](https://github.com/wez/atomicparsley) - For `--embed-thumbnail` in `mp4`/`m4a` files when `mutagen`/`ffmpeg` cannot. Licensed under [GPLv2+](https://github.com/wez/atomicparsley/blob/master/COPYING)
|
* [**AtomicParsley**](https://github.com/wez/atomicparsley) - For `--embed-thumbnail` in `mp4`/`m4a` files when `mutagen`/`ffmpeg` cannot. Licensed under [GPLv2+](https://github.com/wez/atomicparsley/blob/master/COPYING)
|
||||||
* [**xattr**](https://github.com/xattr/xattr), [**pyxattr**](https://github.com/iustin/pyxattr) or [**setfattr**](http://savannah.nongnu.org/projects/attr) - For writing xattr metadata (`--xattr`) on **Mac** and **BSD**. Licensed under [MIT](https://github.com/xattr/xattr/blob/master/LICENSE.txt), [LGPL2.1](https://github.com/iustin/pyxattr/blob/master/COPYING) and [GPLv2+](http://git.savannah.nongnu.org/cgit/attr.git/tree/doc/COPYING) respectively
|
* [**xattr**](https://github.com/xattr/xattr), [**pyxattr**](https://github.com/iustin/pyxattr) or [**setfattr**](http://savannah.nongnu.org/projects/attr) - For writing xattr metadata (`--xattrs`) on **Mac** and **BSD**. Licensed under [MIT](https://github.com/xattr/xattr/blob/master/LICENSE.txt), [LGPL2.1](https://github.com/iustin/pyxattr/blob/master/COPYING) and [GPLv2+](http://git.savannah.nongnu.org/cgit/attr.git/tree/doc/COPYING) respectively
|
||||||
|
|
||||||
### Misc
|
### Misc
|
||||||
|
|
||||||
@@ -218,8 +241,6 @@ ### Misc
|
|||||||
|
|
||||||
### Deprecated
|
### Deprecated
|
||||||
|
|
||||||
* [**avconv** and **avprobe**](https://www.libav.org) - Now **deprecated** alternative to ffmpeg. License [depends on the build](https://libav.org/legal)
|
|
||||||
* [**sponskrub**](https://github.com/faissaloo/SponSkrub) - For using the now **deprecated** [sponskrub options](#sponskrub-options). Licensed under [GPLv3+](https://github.com/faissaloo/SponSkrub/blob/master/LICENCE.md)
|
|
||||||
* [**rtmpdump**](http://rtmpdump.mplayerhq.hu) - For downloading `rtmp` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](http://rtmpdump.mplayerhq.hu)
|
* [**rtmpdump**](http://rtmpdump.mplayerhq.hu) - For downloading `rtmp` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](http://rtmpdump.mplayerhq.hu)
|
||||||
* [**mplayer**](http://mplayerhq.hu/design7/info.html) or [**mpv**](https://mpv.io) - For downloading `rstp`/`mms` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](https://github.com/mpv-player/mpv/blob/master/Copyright)
|
* [**mplayer**](http://mplayerhq.hu/design7/info.html) or [**mpv**](https://mpv.io) - For downloading `rstp`/`mms` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](https://github.com/mpv-player/mpv/blob/master/Copyright)
|
||||||
|
|
||||||
@@ -276,7 +297,7 @@ # USAGE AND OPTIONS
|
|||||||
<!-- MANPAGE: BEGIN EXCLUDED SECTION -->
|
<!-- MANPAGE: BEGIN EXCLUDED SECTION -->
|
||||||
yt-dlp [OPTIONS] [--] URL [URL...]
|
yt-dlp [OPTIONS] [--] URL [URL...]
|
||||||
|
|
||||||
`Ctrl+F` is your friend :D
|
Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
|
||||||
<!-- MANPAGE: END EXCLUDED SECTION -->
|
<!-- MANPAGE: END EXCLUDED SECTION -->
|
||||||
|
|
||||||
<!-- Auto generated -->
|
<!-- Auto generated -->
|
||||||
@@ -299,7 +320,6 @@ ## General Options:
|
|||||||
playlist (default)
|
playlist (default)
|
||||||
--abort-on-error Abort downloading of further videos if an
|
--abort-on-error Abort downloading of further videos if an
|
||||||
error occurs (Alias: --no-ignore-errors)
|
error occurs (Alias: --no-ignore-errors)
|
||||||
--dump-user-agent Display the current user-agent and exit
|
|
||||||
--list-extractors List all supported extractors and exit
|
--list-extractors List all supported extractors and exit
|
||||||
--extractor-descriptions Output descriptions of all supported
|
--extractor-descriptions Output descriptions of all supported
|
||||||
extractors and exit
|
extractors and exit
|
||||||
@@ -348,8 +368,8 @@ ## General Options:
|
|||||||
--no-flat-playlist Fully extract the videos of a playlist
|
--no-flat-playlist Fully extract the videos of a playlist
|
||||||
(default)
|
(default)
|
||||||
--live-from-start Download livestreams from the start.
|
--live-from-start Download livestreams from the start.
|
||||||
Currently only supported for YouTube
|
Currently experimental and only supported
|
||||||
(Experimental)
|
for YouTube and Twitch
|
||||||
--no-live-from-start Download livestreams from the current time
|
--no-live-from-start Download livestreams from the current time
|
||||||
(default)
|
(default)
|
||||||
--wait-for-video MIN[-MAX] Wait for scheduled streams to become
|
--wait-for-video MIN[-MAX] Wait for scheduled streams to become
|
||||||
@@ -375,12 +395,12 @@ ## General Options:
|
|||||||
an alias starts with a dash "-", it is
|
an alias starts with a dash "-", it is
|
||||||
prefixed with "--". Arguments are parsed
|
prefixed with "--". Arguments are parsed
|
||||||
according to the Python string formatting
|
according to the Python string formatting
|
||||||
mini-language. E.g. --alias get-audio,-X
|
mini-language. E.g. --alias get-audio,-X "-S
|
||||||
"-S=aext:{0},abr -x --audio-format {0}"
|
aext:{0},abr -x --audio-format {0}" creates
|
||||||
creates options "--get-audio" and "-X" that
|
options "--get-audio" and "-X" that takes an
|
||||||
takes an argument (ARG0) and expands to
|
argument (ARG0) and expands to "-S
|
||||||
"-S=aext:ARG0,abr -x --audio-format ARG0".
|
aext:ARG0,abr -x --audio-format ARG0". All
|
||||||
All defined aliases are listed in the --help
|
defined aliases are listed in the --help
|
||||||
output. Alias options can trigger more
|
output. Alias options can trigger more
|
||||||
aliases; so be careful to avoid defining
|
aliases; so be careful to avoid defining
|
||||||
recursive options. As a safety measure, each
|
recursive options. As a safety measure, each
|
||||||
@@ -550,8 +570,6 @@ ## Download Options:
|
|||||||
--playlist-random and --playlist-reverse
|
--playlist-random and --playlist-reverse
|
||||||
--no-lazy-playlist Process videos in the playlist only after
|
--no-lazy-playlist Process videos in the playlist only after
|
||||||
the entire playlist is parsed (default)
|
the entire playlist is parsed (default)
|
||||||
--xattr-set-filesize Set file xattribute ytdl.filesize with
|
|
||||||
expected file size
|
|
||||||
--hls-use-mpegts Use the mpegts container for HLS videos;
|
--hls-use-mpegts Use the mpegts container for HLS videos;
|
||||||
allowing some players to play the video
|
allowing some players to play the video
|
||||||
while downloading, and reducing the chance
|
while downloading, and reducing the chance
|
||||||
@@ -575,9 +593,9 @@ ## Download Options:
|
|||||||
use (optionally) prefixed by the protocols
|
use (optionally) prefixed by the protocols
|
||||||
(http, ftp, m3u8, dash, rstp, rtmp, mms) to
|
(http, ftp, m3u8, dash, rstp, rtmp, mms) to
|
||||||
use it for. Currently supports native,
|
use it for. Currently supports native,
|
||||||
aria2c, avconv, axel, curl, ffmpeg, httpie,
|
aria2c, axel, curl, ffmpeg, httpie, wget.
|
||||||
wget. You can use this option multiple times
|
You can use this option multiple times to
|
||||||
to set different downloaders for different
|
set different downloaders for different
|
||||||
protocols. E.g. --downloader aria2c
|
protocols. E.g. --downloader aria2c
|
||||||
--downloader "dash,m3u8:native" will use
|
--downloader "dash,m3u8:native" will use
|
||||||
aria2c for http/ftp downloads, and the
|
aria2c for http/ftp downloads, and the
|
||||||
@@ -638,9 +656,9 @@ ## Filesystem Options:
|
|||||||
--no-part Do not use .part files - write directly into
|
--no-part Do not use .part files - write directly into
|
||||||
output file
|
output file
|
||||||
--mtime Use the Last-modified header to set the file
|
--mtime Use the Last-modified header to set the file
|
||||||
modification time (default)
|
modification time
|
||||||
--no-mtime Do not use the Last-modified header to set
|
--no-mtime Do not use the Last-modified header to set
|
||||||
the file modification time
|
the file modification time (default)
|
||||||
--write-description Write video description to a .description file
|
--write-description Write video description to a .description file
|
||||||
--no-write-description Do not write video description (default)
|
--no-write-description Do not write video description (default)
|
||||||
--write-info-json Write video metadata to a .info.json file
|
--write-info-json Write video metadata to a .info.json file
|
||||||
@@ -1105,6 +1123,10 @@ ## Extractor Options:
|
|||||||
arguments for different extractors
|
arguments for different extractors
|
||||||
|
|
||||||
## Preset Aliases:
|
## Preset Aliases:
|
||||||
|
Predefined aliases for convenience and ease of use. Note that future
|
||||||
|
versions of yt-dlp may add or adjust presets, but the existing preset
|
||||||
|
names will not be changed or removed
|
||||||
|
|
||||||
-t mp3 -f 'ba[acodec^=mp3]/ba/b' -x --audio-format
|
-t mp3 -f 'ba[acodec^=mp3]/ba/b' -x --audio-format
|
||||||
mp3
|
mp3
|
||||||
|
|
||||||
@@ -1151,15 +1173,15 @@ # CONFIGURATION
|
|||||||
* `/etc/yt-dlp/config`
|
* `/etc/yt-dlp/config`
|
||||||
* `/etc/yt-dlp/config.txt`
|
* `/etc/yt-dlp/config.txt`
|
||||||
|
|
||||||
E.g. with the following configuration file, yt-dlp will always extract the audio, not copy the mtime, use a proxy and save all videos under `YouTube` directory in your home directory:
|
E.g. with the following configuration file, yt-dlp will always extract the audio, copy the mtime, use a proxy and save all videos under `YouTube` directory in your home directory:
|
||||||
```
|
```
|
||||||
# Lines starting with # are comments
|
# Lines starting with # are comments
|
||||||
|
|
||||||
# Always extract audio
|
# Always extract audio
|
||||||
-x
|
-x
|
||||||
|
|
||||||
# Do not copy the mtime
|
# Copy the mtime
|
||||||
--no-mtime
|
--mtime
|
||||||
|
|
||||||
# Use this proxy
|
# Use this proxy
|
||||||
--proxy 127.0.0.1:3128
|
--proxy 127.0.0.1:3128
|
||||||
@@ -1790,11 +1812,14 @@ # EXTRACTOR ARGUMENTS
|
|||||||
The following extractors use this feature:
|
The following extractors use this feature:
|
||||||
|
|
||||||
#### youtube
|
#### youtube
|
||||||
* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube.py](https://github.com/yt-dlp/yt-dlp/blob/c26f9b991a0681fd3ea548d535919cec1fbbd430/yt_dlp/extractor/youtube.py#L381-L390) for list of supported content language codes
|
* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube/_base.py](https://github.com/yt-dlp/yt-dlp/blob/415b4c9f955b1a0391204bd24a7132590e7b3bdb/yt_dlp/extractor/youtube/_base.py#L402-L409) for the list of supported content language codes
|
||||||
* `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively
|
* `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively
|
||||||
* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_vr`, `tv` and `tv_embedded`. By default, `tv,ios,web` is used, or `tv,web` is used when authenticating with cookies. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
|
* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_vr`, `tv`, `tv_simply` and `tv_embedded`. By default, `tv_simply,tv,web` is used, but `tv,web_safari,web` is used when authenticating with cookies and `tv,web_creator,web` is used with premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
|
||||||
* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details
|
* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details
|
||||||
|
* `webpage_skip`: Skip extraction of embedded webpage data. One or both of `player_response`, `initial_data`. These options are for testing purposes and don't skip any network requests
|
||||||
* `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp.
|
* `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp.
|
||||||
|
* `player_js_variant`: The player javascript variant to use for n/sig deciphering. The known variants are: `main`, `tcc`, `tce`, `es5`, `es6`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
|
||||||
|
* `player_js_version`: The player javascript version to use for n/sig deciphering, in the format of `signature_timestamp@hash`. Currently, the default is to force `20348@0004de42`. You can use `actual` to go with what is prescribed by the site
|
||||||
* `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
|
* `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
|
||||||
* `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
|
* `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
|
||||||
* E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total
|
* E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total
|
||||||
@@ -1804,8 +1829,13 @@ #### youtube
|
|||||||
* `raise_incomplete_data`: `Incomplete Data Received` raises an error instead of reporting a warning
|
* `raise_incomplete_data`: `Incomplete Data Received` raises an error instead of reporting a warning
|
||||||
* `data_sync_id`: Overrides the account Data Sync ID used in Innertube API requests. This may be needed if you are using an account with `youtube:player_skip=webpage,configs` or `youtubetab:skip=webpage`
|
* `data_sync_id`: Overrides the account Data Sync ID used in Innertube API requests. This may be needed if you are using an account with `youtube:player_skip=webpage,configs` or `youtubetab:skip=webpage`
|
||||||
* `visitor_data`: Overrides the Visitor Data used in Innertube API requests. This should be used with `player_skip=webpage,configs` and without cookies. Note: this may have adverse effects if used improperly. If a session from a browser is wanted, you should pass cookies instead (which contain the Visitor ID)
|
* `visitor_data`: Overrides the Visitor Data used in Innertube API requests. This should be used with `player_skip=webpage,configs` and without cookies. Note: this may have adverse effects if used improperly. If a session from a browser is wanted, you should pass cookies instead (which contain the Visitor ID)
|
||||||
* `po_token`: Proof of Origin (PO) Token(s) to use. Comma seperated list of PO Tokens in the format `CLIENT.CONTEXT+PO_TOKEN`, e.g. `youtube:po_token=web.gvs+XXX,web.player=XXX,web_safari.gvs+YYY`. Context can be either `gvs` (Google Video Server URLs) or `player` (Innertube player request)
|
* `po_token`: Proof of Origin (PO) Token(s) to use. Comma seperated list of PO Tokens in the format `CLIENT.CONTEXT+PO_TOKEN`, e.g. `youtube:po_token=web.gvs+XXX,web.player=XXX,web_safari.gvs+YYY`. Context can be any of `gvs` (Google Video Server URLs), `player` (Innertube player request) or `subs` (Subtitles)
|
||||||
* `player_js_variant`: The player javascript variant to use for signature and nsig deciphering. The known variants are: `main`, `tce`, `tv`, `tv_es6`, `phone`, `tablet`. Only `main` is recommended as a possible workaround; the others are for debugging purposes. The default is to use what is prescribed by the site, and can be selected with `actual`
|
* `pot_trace`: Enable debug logging for PO Token fetching. Either `true` or `false` (default)
|
||||||
|
* `fetch_pot`: Policy to use for fetching a PO Token from providers. One of `always` (always try fetch a PO Token regardless if the client requires one for the given context), `never` (never fetch a PO Token), or `auto` (default; only fetch a PO Token if the client requires one for the given context)
|
||||||
|
* `playback_wait`: Duration (in seconds) to wait inbetween the extraction and download stages in order to ensure the formats are available. The default is `6` seconds
|
||||||
|
|
||||||
|
#### youtubepot-webpo
|
||||||
|
* `bind_to_visitor_id`: Whether to use the Visitor ID instead of Visitor Data for caching WebPO tokens. Either `true` (default) or `false`
|
||||||
|
|
||||||
#### youtubetab (YouTube playlists, channels, feeds, etc.)
|
#### youtubetab (YouTube playlists, channels, feeds, etc.)
|
||||||
* `skip`: One or more of `webpage` (skip initial webpage download), `authcheck` (allow the download of playlists requiring authentication when no initial webpage is downloaded. This may cause unwanted behavior, see [#1122](https://github.com/yt-dlp/yt-dlp/pull/1122) for more details)
|
* `skip`: One or more of `webpage` (skip initial webpage download), `authcheck` (allow the download of playlists requiring authentication when no initial webpage is downloaded. This may cause unwanted behavior, see [#1122](https://github.com/yt-dlp/yt-dlp/pull/1122) for more details)
|
||||||
@@ -1890,6 +1920,10 @@ #### sonylivseries
|
|||||||
#### tver
|
#### tver
|
||||||
* `backend`: Backend API to use for extraction - one of `streaks` (default) or `brightcove` (deprecated)
|
* `backend`: Backend API to use for extraction - one of `streaks` (default) or `brightcove` (deprecated)
|
||||||
|
|
||||||
|
#### vimeo
|
||||||
|
* `client`: Client to extract video data from. The currently available clients are `android`, `ios`, and `web`. Only one client can be used. The `web` client is used by default. The `web` client only works with account cookies or login credentials. The `android` and `ios` clients only work with previously cached OAuth tokens
|
||||||
|
* `original_format_policy`: Policy for when to try extracting original formats. One of `always`, `never`, or `auto`. The default `auto` policy tries to avoid exceeding the web client's API rate-limit by only making an extra request when Vimeo publicizes the video's downloadability
|
||||||
|
|
||||||
**Note**: These options may be changed/removed in the future without concern for backward compatibility
|
**Note**: These options may be changed/removed in the future without concern for backward compatibility
|
||||||
|
|
||||||
<!-- MANPAGE: MOVE "INSTALLATION" SECTION HERE -->
|
<!-- MANPAGE: MOVE "INSTALLATION" SECTION HERE -->
|
||||||
@@ -2180,7 +2214,6 @@ ### New features
|
|||||||
* Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\***
|
* Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\***
|
||||||
* Download livestreams from the start using `--live-from-start` (*experimental*)
|
* Download livestreams from the start using `--live-from-start` (*experimental*)
|
||||||
* Channel URLs download all uploads of the channel, including shorts and live
|
* Channel URLs download all uploads of the channel, including shorts and live
|
||||||
* Support for [logging in with OAuth](https://github.com/yt-dlp/yt-dlp/wiki/Extractors#logging-in-with-oauth)
|
|
||||||
|
|
||||||
* **Cookies from browser**: Cookies can be automatically extracted from all major web browsers using `--cookies-from-browser BROWSER[+KEYRING][:PROFILE][::CONTAINER]`
|
* **Cookies from browser**: Cookies can be automatically extracted from all major web browsers using `--cookies-from-browser BROWSER[+KEYRING][:PROFILE][::CONTAINER]`
|
||||||
|
|
||||||
@@ -2252,6 +2285,7 @@ ### Differences in default behavior
|
|||||||
* yt-dlp uses modern http client backends such as `requests`. Use `--compat-options prefer-legacy-http-handler` to prefer the legacy http handler (`urllib`) to be used for standard http requests.
|
* yt-dlp uses modern http client backends such as `requests`. Use `--compat-options prefer-legacy-http-handler` to prefer the legacy http handler (`urllib`) to be used for standard http requests.
|
||||||
* The sub-modules `swfinterp`, `casefold` are removed.
|
* The sub-modules `swfinterp`, `casefold` are removed.
|
||||||
* Passing `--simulate` (or calling `extract_info` with `download=False`) no longer alters the default format selection. See [#9843](https://github.com/yt-dlp/yt-dlp/issues/9843) for details.
|
* Passing `--simulate` (or calling `extract_info` with `download=False`) no longer alters the default format selection. See [#9843](https://github.com/yt-dlp/yt-dlp/issues/9843) for details.
|
||||||
|
* yt-dlp no longer applies the server modified time to downloaded files by default. Use `--mtime` or `--compat-options mtime-by-default` to revert this.
|
||||||
|
|
||||||
For ease of use, a few more compat options are available:
|
For ease of use, a few more compat options are available:
|
||||||
|
|
||||||
@@ -2261,7 +2295,7 @@ ### Differences in default behavior
|
|||||||
* `--compat-options 2021`: Same as `--compat-options 2022,no-certifi,filename-sanitization`
|
* `--compat-options 2021`: Same as `--compat-options 2022,no-certifi,filename-sanitization`
|
||||||
* `--compat-options 2022`: Same as `--compat-options 2023,playlist-match-filter,no-external-downloader-progress,prefer-legacy-http-handler,manifest-filesize-approx`
|
* `--compat-options 2022`: Same as `--compat-options 2023,playlist-match-filter,no-external-downloader-progress,prefer-legacy-http-handler,manifest-filesize-approx`
|
||||||
* `--compat-options 2023`: Same as `--compat-options 2024,prefer-vp9-sort`
|
* `--compat-options 2023`: Same as `--compat-options 2024,prefer-vp9-sort`
|
||||||
* `--compat-options 2024`: Currently does nothing. Use this to enable all future compat options
|
* `--compat-options 2024`: Same as `--compat-options mtime-by-default`. Use this to enable all future compat options
|
||||||
|
|
||||||
The following compat options restore vulnerable behavior from before security patches:
|
The following compat options restore vulnerable behavior from before security patches:
|
||||||
|
|
||||||
@@ -2323,11 +2357,7 @@ #### Not recommended
|
|||||||
--hls-prefer-native --downloader "m3u8:native"
|
--hls-prefer-native --downloader "m3u8:native"
|
||||||
--hls-prefer-ffmpeg --downloader "m3u8:ffmpeg"
|
--hls-prefer-ffmpeg --downloader "m3u8:ffmpeg"
|
||||||
--list-formats-old --compat-options list-formats (Alias: --no-list-formats-as-table)
|
--list-formats-old --compat-options list-formats (Alias: --no-list-formats-as-table)
|
||||||
--list-formats-as-table --compat-options -list-formats [Default] (Alias: --no-list-formats-old)
|
--list-formats-as-table --compat-options -list-formats [Default]
|
||||||
--youtube-skip-dash-manifest --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
|
|
||||||
--youtube-skip-hls-manifest --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
|
|
||||||
--youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
|
|
||||||
--youtube-include-hls-manifest Default (Alias: --no-youtube-skip-hls-manifest)
|
|
||||||
--geo-bypass --xff "default"
|
--geo-bypass --xff "default"
|
||||||
--no-geo-bypass --xff "never"
|
--no-geo-bypass --xff "never"
|
||||||
--geo-bypass-country CODE --xff CODE
|
--geo-bypass-country CODE --xff CODE
|
||||||
@@ -2338,20 +2368,14 @@ #### Developer options
|
|||||||
|
|
||||||
--test Download only part of video for testing extractors
|
--test Download only part of video for testing extractors
|
||||||
--load-pages Load pages dumped by --write-pages
|
--load-pages Load pages dumped by --write-pages
|
||||||
--youtube-print-sig-code For testing youtube signatures
|
|
||||||
--allow-unplayable-formats List unplayable formats also
|
--allow-unplayable-formats List unplayable formats also
|
||||||
--no-allow-unplayable-formats Default
|
--no-allow-unplayable-formats Default
|
||||||
|
|
||||||
#### Old aliases
|
#### Old aliases
|
||||||
These are aliases that are no longer documented for various reasons
|
These are aliases that are no longer documented for various reasons
|
||||||
|
|
||||||
--avconv-location --ffmpeg-location
|
|
||||||
--clean-infojson --clean-info-json
|
--clean-infojson --clean-info-json
|
||||||
--cn-verification-proxy URL --geo-verification-proxy URL
|
|
||||||
--dump-headers --print-traffic
|
|
||||||
--dump-intermediate-pages --dump-pages
|
|
||||||
--force-write-download-archive --force-write-archive
|
--force-write-download-archive --force-write-archive
|
||||||
--load-info --load-info-json
|
|
||||||
--no-clean-infojson --no-clean-info-json
|
--no-clean-infojson --no-clean-info-json
|
||||||
--no-split-tracks --no-split-chapters
|
--no-split-tracks --no-split-chapters
|
||||||
--no-write-srt --no-write-subs
|
--no-write-srt --no-write-subs
|
||||||
@@ -2364,7 +2388,7 @@ #### Old aliases
|
|||||||
--yes-overwrites --force-overwrites
|
--yes-overwrites --force-overwrites
|
||||||
|
|
||||||
#### Sponskrub Options
|
#### Sponskrub Options
|
||||||
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been deprecated in favor of the `--sponsorblock` options
|
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been removed in favor of the `--sponsorblock` options
|
||||||
|
|
||||||
--sponskrub --sponsorblock-mark all
|
--sponskrub --sponsorblock-mark all
|
||||||
--no-sponskrub --no-sponsorblock
|
--no-sponskrub --no-sponsorblock
|
||||||
@@ -2386,6 +2410,17 @@ #### No longer supported
|
|||||||
--no-include-ads Default
|
--no-include-ads Default
|
||||||
--write-annotations No supported site has annotations now
|
--write-annotations No supported site has annotations now
|
||||||
--no-write-annotations Default
|
--no-write-annotations Default
|
||||||
|
--avconv-location Removed alias for --ffmpeg-location
|
||||||
|
--cn-verification-proxy URL Removed alias for --geo-verification-proxy URL
|
||||||
|
--dump-headers Removed alias for --print-traffic
|
||||||
|
--dump-intermediate-pages Removed alias for --dump-pages
|
||||||
|
--youtube-skip-dash-manifest Removed alias for --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
|
||||||
|
--youtube-skip-hls-manifest Removed alias for --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
|
||||||
|
--youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
|
||||||
|
--youtube-include-hls-manifest Default (Alias: --no-youtube-skip-hls-manifest)
|
||||||
|
--youtube-print-sig-code Removed testing functionality
|
||||||
|
--dump-user-agent No longer supported
|
||||||
|
--xattr-set-filesize No longer supported
|
||||||
--compat-options seperate-video-versions No longer needed
|
--compat-options seperate-video-versions No longer needed
|
||||||
--compat-options no-youtube-prefer-utc-upload-date No longer supported
|
--compat-options no-youtube-prefer-utc-upload-date No longer supported
|
||||||
|
|
||||||
|
|||||||
4433
THIRD_PARTY_LICENSES.txt
Normal file
4433
THIRD_PARTY_LICENSES.txt
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,178 @@
|
|||||||
services:
|
services:
|
||||||
static:
|
|
||||||
build: static
|
linux_x86_64:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: build
|
||||||
|
platforms:
|
||||||
|
- "linux/amd64"
|
||||||
|
args:
|
||||||
|
BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
|
||||||
environment:
|
environment:
|
||||||
channel: ${channel}
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
origin: ${origin}
|
CHANNEL: ${CHANNEL:?}
|
||||||
version: ${version}
|
ORIGIN: ${ORIGIN:?}
|
||||||
|
VERSION:
|
||||||
|
PYTHON_VERSION:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
volumes:
|
volumes:
|
||||||
- ~/build:/build
|
|
||||||
- ../..:/yt-dlp
|
- ../..:/yt-dlp
|
||||||
|
|
||||||
|
linux_x86_64_verify:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: verify
|
||||||
|
platforms:
|
||||||
|
- "linux/amd64"
|
||||||
|
args:
|
||||||
|
VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
UPDATE_TO:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../../dist:/build
|
||||||
|
|
||||||
|
linux_aarch64:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: build
|
||||||
|
platforms:
|
||||||
|
- "linux/arm64"
|
||||||
|
args:
|
||||||
|
BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
CHANNEL: ${CHANNEL:?}
|
||||||
|
ORIGIN: ${ORIGIN:?}
|
||||||
|
VERSION:
|
||||||
|
PYTHON_VERSION:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../..:/yt-dlp
|
||||||
|
|
||||||
|
linux_aarch64_verify:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: verify
|
||||||
|
platforms:
|
||||||
|
- "linux/arm64"
|
||||||
|
args:
|
||||||
|
VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
UPDATE_TO:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../../dist:/build
|
||||||
|
|
||||||
|
linux_armv7l:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: build
|
||||||
|
platforms:
|
||||||
|
- "linux/arm/v7"
|
||||||
|
args:
|
||||||
|
BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
CHANNEL: ${CHANNEL:?}
|
||||||
|
ORIGIN: ${ORIGIN:?}
|
||||||
|
VERSION:
|
||||||
|
PYTHON_VERSION:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../..:/yt-dlp
|
||||||
|
- ../../venv:/yt-dlp-build-venv
|
||||||
|
|
||||||
|
linux_armv7l_verify:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: verify
|
||||||
|
platforms:
|
||||||
|
- "linux/arm/v7"
|
||||||
|
args:
|
||||||
|
VERIFYIMAGE: arm32v7/debian:bullseye
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
UPDATE_TO:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../../dist:/build
|
||||||
|
|
||||||
|
musllinux_x86_64:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: build
|
||||||
|
platforms:
|
||||||
|
- "linux/amd64"
|
||||||
|
args:
|
||||||
|
BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
CHANNEL: ${CHANNEL:?}
|
||||||
|
ORIGIN: ${ORIGIN:?}
|
||||||
|
VERSION:
|
||||||
|
PYTHON_VERSION:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../..:/yt-dlp
|
||||||
|
|
||||||
|
musllinux_x86_64_verify:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: verify
|
||||||
|
platforms:
|
||||||
|
- "linux/amd64"
|
||||||
|
args:
|
||||||
|
VERIFYIMAGE: alpine:3.22
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
UPDATE_TO:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../../dist:/build
|
||||||
|
|
||||||
|
musllinux_aarch64:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: build
|
||||||
|
platforms:
|
||||||
|
- "linux/arm64"
|
||||||
|
args:
|
||||||
|
BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
CHANNEL: ${CHANNEL:?}
|
||||||
|
ORIGIN: ${ORIGIN:?}
|
||||||
|
VERSION:
|
||||||
|
PYTHON_VERSION:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
EXCLUDE_CURL_CFFI: "1"
|
||||||
|
volumes:
|
||||||
|
- ../..:/yt-dlp
|
||||||
|
|
||||||
|
musllinux_aarch64_verify:
|
||||||
|
build:
|
||||||
|
context: linux
|
||||||
|
target: verify
|
||||||
|
platforms:
|
||||||
|
- "linux/arm64"
|
||||||
|
args:
|
||||||
|
VERIFYIMAGE: alpine:3.22
|
||||||
|
environment:
|
||||||
|
EXE_NAME: ${EXE_NAME:?}
|
||||||
|
UPDATE_TO:
|
||||||
|
SKIP_ONEDIR_BUILD:
|
||||||
|
SKIP_ONEFILE_BUILD:
|
||||||
|
volumes:
|
||||||
|
- ../../dist:/build
|
||||||
|
|||||||
16
bundle/docker/linux/Dockerfile
Normal file
16
bundle/docker/linux/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
|
||||||
|
ARG VERIFYIMAGE=alpine:3.22
|
||||||
|
|
||||||
|
|
||||||
|
FROM $BUILDIMAGE AS build
|
||||||
|
|
||||||
|
WORKDIR /yt-dlp
|
||||||
|
COPY build.sh /build.sh
|
||||||
|
ENTRYPOINT ["/build.sh"]
|
||||||
|
|
||||||
|
|
||||||
|
FROM $VERIFYIMAGE AS verify
|
||||||
|
|
||||||
|
WORKDIR /testing
|
||||||
|
COPY verify.sh /verify.sh
|
||||||
|
ENTRYPOINT ["/verify.sh"]
|
||||||
48
bundle/docker/linux/build.sh
Executable file
48
bundle/docker/linux/build.sh
Executable file
@@ -0,0 +1,48 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -exuo pipefail
|
||||||
|
|
||||||
|
if [[ -z "${PYTHON_VERSION:-}" ]]; then
|
||||||
|
PYTHON_VERSION="3.13"
|
||||||
|
echo "Defaulting to using Python ${PYTHON_VERSION}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
function runpy {
|
||||||
|
"/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
|
||||||
|
}
|
||||||
|
|
||||||
|
function venvpy {
|
||||||
|
"python${PYTHON_VERSION}" "$@"
|
||||||
|
}
|
||||||
|
|
||||||
|
INCLUDES=(
|
||||||
|
--include pyinstaller
|
||||||
|
--include secretstorage
|
||||||
|
)
|
||||||
|
|
||||||
|
if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
|
||||||
|
INCLUDES+=(--include curl-cffi)
|
||||||
|
fi
|
||||||
|
|
||||||
|
runpy -m venv /yt-dlp-build-venv
|
||||||
|
# shellcheck disable=SC1091
|
||||||
|
source /yt-dlp-build-venv/bin/activate
|
||||||
|
# Inside the venv we use venvpy instead of runpy
|
||||||
|
venvpy -m ensurepip --upgrade --default-pip
|
||||||
|
venvpy -m devscripts.install_deps -o --include build
|
||||||
|
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
|
||||||
|
venvpy -m devscripts.make_lazy_extractors
|
||||||
|
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
|
||||||
|
|
||||||
|
if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
|
||||||
|
mkdir -p /build
|
||||||
|
venvpy -m bundle.pyinstaller --onedir --distpath=/build
|
||||||
|
pushd "/build/${EXE_NAME}"
|
||||||
|
chmod +x "${EXE_NAME}"
|
||||||
|
venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
|
||||||
|
popd
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
|
||||||
|
venvpy -m bundle.pyinstaller
|
||||||
|
chmod +x "./dist/${EXE_NAME}"
|
||||||
|
fi
|
||||||
51
bundle/docker/linux/verify.sh
Executable file
51
bundle/docker/linux/verify.sh
Executable file
@@ -0,0 +1,51 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
|
||||||
|
if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
|
||||||
|
echo "All executable builds were skipped"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Extracting zip to verify onedir build"
|
||||||
|
if command -v python3 >/dev/null 2>&1; then
|
||||||
|
python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
|
||||||
|
else
|
||||||
|
echo "Attempting to install unzip"
|
||||||
|
if command -v dnf >/dev/null 2>&1; then
|
||||||
|
dnf -y install --allowerasing unzip
|
||||||
|
elif command -v yum >/dev/null 2>&1; then
|
||||||
|
yum -y install unzip
|
||||||
|
elif command -v apt-get >/dev/null 2>&1; then
|
||||||
|
DEBIAN_FRONTEND=noninteractive apt-get update -qq
|
||||||
|
DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
|
||||||
|
elif command -v apk >/dev/null 2>&1; then
|
||||||
|
apk add --no-cache unzip
|
||||||
|
else
|
||||||
|
echo "Unsupported image"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
unzip "/build/${EXE_NAME}.zip" -d ./
|
||||||
|
fi
|
||||||
|
chmod +x "./${EXE_NAME}"
|
||||||
|
"./${EXE_NAME}" -v || true
|
||||||
|
"./${EXE_NAME}" --version
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Verifying onefile build"
|
||||||
|
cp "/build/${EXE_NAME}" ./
|
||||||
|
chmod +x "./${EXE_NAME}"
|
||||||
|
|
||||||
|
if [ -z "${UPDATE_TO:-}" ]; then
|
||||||
|
"./${EXE_NAME}" -v || true
|
||||||
|
"./${EXE_NAME}" --version
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
|
||||||
|
version="$("./${EXE_NAME}" --version)"
|
||||||
|
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
|
||||||
|
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
|
||||||
|
if [ "${version}" = "${downgraded_version}" ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
FROM alpine:3.19 as base
|
|
||||||
|
|
||||||
RUN apk --update add --no-cache \
|
|
||||||
build-base \
|
|
||||||
python3 \
|
|
||||||
pipx \
|
|
||||||
;
|
|
||||||
|
|
||||||
RUN pipx install pyinstaller
|
|
||||||
# Requires above step to prepare the shared venv
|
|
||||||
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
|
|
||||||
RUN apk --update add --no-cache \
|
|
||||||
scons \
|
|
||||||
patchelf \
|
|
||||||
binutils \
|
|
||||||
;
|
|
||||||
RUN pipx install staticx
|
|
||||||
|
|
||||||
WORKDIR /yt-dlp
|
|
||||||
COPY entrypoint.sh /entrypoint.sh
|
|
||||||
ENTRYPOINT /entrypoint.sh
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
#!/bin/ash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
|
|
||||||
python -m devscripts.install_deps --include secretstorage --include curl-cffi
|
|
||||||
python -m devscripts.make_lazy_extractors
|
|
||||||
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
|
|
||||||
python -m bundle.pyinstaller
|
|
||||||
deactivate
|
|
||||||
|
|
||||||
source ~/.local/share/pipx/venvs/staticx/bin/activate
|
|
||||||
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
|
|
||||||
deactivate
|
|
||||||
@@ -13,6 +13,8 @@
|
|||||||
from devscripts.utils import read_version
|
from devscripts.utils import read_version
|
||||||
|
|
||||||
OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
|
OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
|
||||||
|
if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
|
||||||
|
OS_NAME = 'musllinux'
|
||||||
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
|
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
|
||||||
MACHINE = 'x86' if ARCH == '32' else ''
|
MACHINE = 'x86' if ARCH == '32' else ''
|
||||||
|
|
||||||
@@ -36,6 +38,9 @@ def main():
|
|||||||
f'--name={name}',
|
f'--name={name}',
|
||||||
'--icon=devscripts/logo.ico',
|
'--icon=devscripts/logo.ico',
|
||||||
'--upx-exclude=vcruntime140.dll',
|
'--upx-exclude=vcruntime140.dll',
|
||||||
|
# Ref: https://github.com/yt-dlp/yt-dlp/issues/13311
|
||||||
|
# https://github.com/pyinstaller/pyinstaller/issues/9149
|
||||||
|
'--exclude-module=pkg_resources',
|
||||||
'--noconfirm',
|
'--noconfirm',
|
||||||
'--additional-hooks-dir=yt_dlp/__pyinstaller',
|
'--additional-hooks-dir=yt_dlp/__pyinstaller',
|
||||||
*opts,
|
*opts,
|
||||||
@@ -59,16 +64,22 @@ def parse_options():
|
|||||||
|
|
||||||
def exe(onedir):
|
def exe(onedir):
|
||||||
"""@returns (name, path)"""
|
"""@returns (name, path)"""
|
||||||
|
platform_name, machine, extension = {
|
||||||
|
'win32': (None, MACHINE, '.exe'),
|
||||||
|
'darwin': ('macos', None, None),
|
||||||
|
}.get(OS_NAME, (OS_NAME, MACHINE, None))
|
||||||
|
|
||||||
name = '_'.join(filter(None, (
|
name = '_'.join(filter(None, (
|
||||||
'yt-dlp',
|
'yt-dlp',
|
||||||
{'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
|
platform_name,
|
||||||
MACHINE,
|
machine,
|
||||||
)))
|
)))
|
||||||
|
|
||||||
return name, ''.join(filter(None, (
|
return name, ''.join(filter(None, (
|
||||||
'dist/',
|
'dist/',
|
||||||
onedir and f'{name}/',
|
onedir and f'{name}/',
|
||||||
name,
|
name,
|
||||||
OS_NAME == 'win32' and '.exe',
|
extension,
|
||||||
)))
|
)))
|
||||||
|
|
||||||
|
|
||||||
@@ -118,7 +129,6 @@ def windows_set_version(exe, version):
|
|||||||
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
||||||
StringStruct('FileVersion', version),
|
StringStruct('FileVersion', version),
|
||||||
StringStruct('InternalName', f'yt-dlp{suffix}'),
|
StringStruct('InternalName', f'yt-dlp{suffix}'),
|
||||||
StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
|
|
||||||
StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
|
StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
|
||||||
StringStruct('ProductName', f'yt-dlp{suffix}'),
|
StringStruct('ProductName', f'yt-dlp{suffix}'),
|
||||||
StringStruct(
|
StringStruct(
|
||||||
|
|||||||
@@ -6,13 +6,17 @@ __yt_dlp()
|
|||||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||||
opts="{{flags}}"
|
opts="{{flags}}"
|
||||||
keywords=":ytfavorites :ytrecommended :ytsubscriptions :ytwatchlater :ythistory"
|
keywords=":ytfavorites :ytrecommended :ytsubscriptions :ytwatchlater :ythistory"
|
||||||
fileopts="-a|--batch-file|--download-archive|--cookies|--load-info"
|
fileopts="-a|--batch-file|--download-archive|--cookies|--load-info-json"
|
||||||
diropts="--cache-dir"
|
diropts="--cache-dir"
|
||||||
|
|
||||||
if [[ ${prev} =~ ${fileopts} ]]; then
|
if [[ ${prev} =~ ${fileopts} ]]; then
|
||||||
|
local IFS=$'\n'
|
||||||
|
type compopt &>/dev/null && compopt -o filenames
|
||||||
COMPREPLY=( $(compgen -f -- ${cur}) )
|
COMPREPLY=( $(compgen -f -- ${cur}) )
|
||||||
return 0
|
return 0
|
||||||
elif [[ ${prev} =~ ${diropts} ]]; then
|
elif [[ ${prev} =~ ${diropts} ]]; then
|
||||||
|
local IFS=$'\n'
|
||||||
|
type compopt &>/dev/null && compopt -o dirnames
|
||||||
COMPREPLY=( $(compgen -d -- ${cur}) )
|
COMPREPLY=( $(compgen -d -- ${cur}) )
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -254,5 +254,49 @@
|
|||||||
{
|
{
|
||||||
"action": "remove",
|
"action": "remove",
|
||||||
"when": "d596824c2f8428362c072518856065070616e348"
|
"when": "d596824c2f8428362c072518856065070616e348"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "7b81634fb1d15999757e7a9883daa6ef09ea785b"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "500761e41acb96953a5064e951d41d190c287e46"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "f3008bc5f89d2691f2f8dfc51b406ef4e25281c3",
|
||||||
|
"short": "[priority] **Default behaviour changed from `--mtime` to `--no-mtime`**\nyt-dlp no longer applies the server modified time to downloaded files by default. [Read more](https://github.com/yt-dlp/yt-dlp/issues/12780)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "959ac99e98c3215437e573c22d64be42d361e863",
|
||||||
|
"short": "[priority] Security: [[CVE-2025-54072](https://nvd.nist.gov/vuln/detail/CVE-2025-54072)] [Fix `--exec` placeholder expansion on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-45hg-7f49-5h56)\n - When `--exec` is used on Windows, the filepath expanded from `{}` (or the default placeholder) is now properly escaped"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "b831406a1d3be34c159835079d12bae624c43610",
|
||||||
|
"short": "[ie/rtve.es:program] Add extractor (#12955)",
|
||||||
|
"authors": ["meGAmeS1", "seproDev"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "23c658b9cbe34a151f8f921ab1320bb5d4e40a4d",
|
||||||
|
"short": "[priority] **The minimum *recommended* Python version has been raised to 3.10**\nSince Python 3.9 will reach end-of-life in October 2025, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "cc5a5caac5fbc0d605b52bde0778d6fd5f97b5ab",
|
||||||
|
"short": "[priority] **darwin_legacy_exe builds are being discontinued**\nThis release's `yt-dlp_macos_legacy` binary will likely be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13856)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "c76ce28e06c816eb5b261dfb6aff6e69dd9b7382",
|
||||||
|
"short": "[priority] **linux_armv7l_exe builds are being discontinued**\nThis release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "08d78996831bd8e1e3c2592d740c3def00bbf548",
|
||||||
|
"short": "[priority] **Several options have been deprecated**\nIn order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ def parse_patched_options(opts):
|
|||||||
'fragment_retries': 0,
|
'fragment_retries': 0,
|
||||||
'extract_flat': False,
|
'extract_flat': False,
|
||||||
'concat_playlist': 'never',
|
'concat_playlist': 'never',
|
||||||
|
'update_self': False,
|
||||||
})
|
})
|
||||||
yt_dlp.options.create_parser = lambda: patched_parser
|
yt_dlp.options.create_parser = lambda: patched_parser
|
||||||
try:
|
try:
|
||||||
|
|||||||
316
devscripts/generate_third_party_licenses.py
Normal file
316
devscripts/generate_third_party_licenses.py
Normal file
@@ -0,0 +1,316 @@
|
|||||||
|
import requests
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
|
||||||
|
CACHE_LOCATION = '.license_cache'
|
||||||
|
HEADER = '''THIRD-PARTY LICENSES
|
||||||
|
|
||||||
|
This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
|
||||||
|
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
|
||||||
|
Source code for bundled third-party components is available from the original projects.
|
||||||
|
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class Dependency:
|
||||||
|
name: str
|
||||||
|
license_url: str
|
||||||
|
project_url: str = ''
|
||||||
|
license: str = ''
|
||||||
|
comment: str = ''
|
||||||
|
|
||||||
|
|
||||||
|
DEPENDENCIES: list[Dependency] = [
|
||||||
|
# Core runtime environment components
|
||||||
|
Dependency(
|
||||||
|
name='Python',
|
||||||
|
license='PSF-2.0',
|
||||||
|
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://www.python.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='Microsoft Distributable Code',
|
||||||
|
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
|
||||||
|
comment='Only included in Windows builds',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='bzip2',
|
||||||
|
license='bzip2-1.0.6',
|
||||||
|
license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
|
||||||
|
project_url='https://sourceware.org/bzip2/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libffi',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
|
||||||
|
project_url='https://sourceware.org/libffi/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='OpenSSL 3.0+',
|
||||||
|
license='Apache-2.0',
|
||||||
|
license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
|
||||||
|
project_url='https://www.openssl.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='SQLite',
|
||||||
|
license='Public Domain', # Technically does not need to be included
|
||||||
|
license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
|
||||||
|
project_url='https://www.sqlite.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='liblzma',
|
||||||
|
license='0BSD', # Technically does not need to be included
|
||||||
|
license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
|
||||||
|
project_url='https://tukaani.org/xz/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='mpdecimal',
|
||||||
|
license='BSD-2-Clause',
|
||||||
|
# No official repo URL
|
||||||
|
license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
|
||||||
|
project_url='https://www.bytereef.org/mpdecimal/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='zlib',
|
||||||
|
license='zlib',
|
||||||
|
license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
|
||||||
|
project_url='https://zlib.net/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='Expat',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
|
||||||
|
project_url='https://libexpat.github.io/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='ncurses',
|
||||||
|
license='X11-distribute-modifications-variant',
|
||||||
|
license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
|
||||||
|
comment='Only included in Linux/macOS builds',
|
||||||
|
project_url='https://invisible-island.net/ncurses/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='GNU Readline',
|
||||||
|
license='GPL-3.0-or-later',
|
||||||
|
license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://www.gnu.org/software/readline/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libstdc++',
|
||||||
|
license='GPL-3.0-with-GCC-exception',
|
||||||
|
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libgcc',
|
||||||
|
license='GPL-3.0-with-GCC-exception',
|
||||||
|
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://gcc.gnu.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libuuid',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libintl',
|
||||||
|
license='LGPL-2.1-or-later',
|
||||||
|
license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/gettext/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libidn2',
|
||||||
|
license='LGPL-3.0-or-later',
|
||||||
|
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/libidn/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libidn2 (Unicode character data files)',
|
||||||
|
license='Unicode-TOU AND Unicode-DFS-2016',
|
||||||
|
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/libidn/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libunistring',
|
||||||
|
license='LGPL-3.0-or-later',
|
||||||
|
license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/libunistring/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='librtmp',
|
||||||
|
license='LGPL-2.1-or-later',
|
||||||
|
# No official repo URL
|
||||||
|
license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://rtmpdump.mplayerhq.hu/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='zstd',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://facebook.github.io/zstd/',
|
||||||
|
),
|
||||||
|
|
||||||
|
# Python packages
|
||||||
|
Dependency(
|
||||||
|
name='brotli',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
|
||||||
|
project_url='https://brotli.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='curl_cffi',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
|
||||||
|
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||||
|
project_url='https://curl-cffi.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of curl_cffi
|
||||||
|
Dependency(
|
||||||
|
name='curl-impersonate',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
|
||||||
|
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||||
|
project_url='https://github.com/lexiforest/curl-impersonate',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='cffi',
|
||||||
|
license='MIT-0', # Technically does not need to be included
|
||||||
|
license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://cffi.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of cffi
|
||||||
|
Dependency(
|
||||||
|
name='pycparser',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://github.com/eliben/pycparser',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='mutagen',
|
||||||
|
license='GPL-2.0-or-later',
|
||||||
|
license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
|
||||||
|
project_url='https://mutagen.readthedocs.io/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='PyCryptodome',
|
||||||
|
license='Public Domain and BSD-2-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
|
||||||
|
project_url='https://www.pycryptodome.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='certifi',
|
||||||
|
license='MPL-2.0',
|
||||||
|
license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
|
||||||
|
project_url='https://github.com/certifi/python-certifi',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='requests',
|
||||||
|
license='Apache-2.0',
|
||||||
|
license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://requests.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of requests
|
||||||
|
Dependency(
|
||||||
|
name='charset-normalizer',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
|
||||||
|
project_url='https://charset-normalizer.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of requests
|
||||||
|
Dependency(
|
||||||
|
name='idna',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
|
||||||
|
project_url='https://github.com/kjd/idna',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='urllib3',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
|
||||||
|
project_url='https://urllib3.readthedocs.io/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='SecretStorage',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://secretstorage.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of SecretStorage
|
||||||
|
Dependency(
|
||||||
|
name='cryptography',
|
||||||
|
license='Apache-2.0', # Also available as BSD-3-Clause
|
||||||
|
license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://cryptography.io/',
|
||||||
|
),
|
||||||
|
# Dependency of SecretStorage
|
||||||
|
Dependency(
|
||||||
|
name='Jeepney',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://jeepney.readthedocs.io/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='websockets',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://websockets.readthedocs.io/',
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_text(dep: Dependency) -> str:
    """Return the license text for *dep*, using an on-disk cache.

    Cache entries are keyed by the SHA-256 of the license URL, so a changed
    URL automatically misses the cache and triggers a re-download.
    """
    cache_dir = Path(CACHE_LOCATION)
    cache_dir.mkdir(exist_ok=True)
    url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
    cache_file = cache_dir / f'{url_hash}.txt'

    if cache_file.exists():
        # License texts are written as UTF-8 below; read them back the same
        # way instead of relying on the platform's locale encoding
        return cache_file.read_text(encoding='utf-8')

    # UA needed since some domains block requests default UA
    req = requests.get(
        dep.license_url,
        headers={'User-Agent': 'yt-dlp license fetcher'},
        timeout=60)  # without a timeout an unresponsive host hangs the script
    req.raise_for_status()
    text = req.text
    cache_file.write_text(text, encoding='utf-8')
    return text
|
||||||
|
|
||||||
|
|
||||||
|
def build_output() -> str:
    """Assemble the complete third-party licenses document as one string."""
    divider = '-' * 80
    parts = [HEADER]
    for dep in DEPENDENCIES:
        # Section title: name, optional license ID, note and project URL
        title = f'{dep.name}'
        if dep.license:
            title = f'{title} | {dep.license}'
        if dep.comment:
            title = f'{title}\nNote: {dep.comment}'
        if dep.project_url:
            title = f'{title}\nURL: {dep.project_url}'
        body = fetch_text(dep).strip('\n') + '\n'
        parts.extend(['\n', divider, title, divider, body])
    return '\n'.join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Regenerate the licenses document at the default output location
    Path(DEFAULT_OUTPUT).write_text(build_output())
|
||||||
@@ -8,7 +8,7 @@ def main():
|
|||||||
return # This is unused in yt-dlp
|
return # This is unused in yt-dlp
|
||||||
|
|
||||||
parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
|
parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
|
||||||
options, args = parser.parse_args()
|
_, args = parser.parse_args()
|
||||||
if len(args) != 2:
|
if len(args) != 2:
|
||||||
parser.error('Expected an input and an output filename')
|
parser.error('Expected an input and an output filename')
|
||||||
|
|
||||||
|
|||||||
157
devscripts/setup_variables.py
Normal file
157
devscripts/setup_variables.py
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
|
||||||
|
from devscripts.utils import calculate_version
|
||||||
|
|
||||||
|
|
||||||
|
STABLE_REPOSITORY = 'yt-dlp/yt-dlp'
|
||||||
|
|
||||||
|
|
||||||
|
def setup_variables(environment):
    """
    Resolve all release variables for the GitHub Actions release workflow.

    `environment` must contain these keys:
        REPOSITORY, INPUTS, PROCESSED,
        PUSH_VERSION_COMMIT, PYPI_PROJECT,
        SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
        TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
        SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
        HAS_SOURCE_ARCHIVE_REPO_TOKEN,
        HAS_TARGET_ARCHIVE_REPO_TOKEN,
        HAS_ARCHIVE_REPO_TOKEN

    `INPUTS` must contain these keys:
        prerelease

    `PROCESSED` must contain these keys:
        source_repo, source_tag,
        target_repo, target_tag

    Returns a dict of output variables, or None when a required
    archive-repo access token is unavailable.
    """
    REPOSITORY = environment['REPOSITORY']
    # INPUTS/PROCESSED arrive JSON-encoded from the workflow
    INPUTS = json.loads(environment['INPUTS'])
    PROCESSED = json.loads(environment['PROCESSED'])

    source_channel = None
    does_not_have_needed_token = False
    target_repo_token = None
    pypi_project = None
    pypi_suffix = None

    source_repo = PROCESSED['source_repo']
    source_tag = PROCESSED['source_tag']
    if source_repo == 'stable':
        # 'stable' is shorthand for the official repository
        source_repo = STABLE_REPOSITORY
    if not source_repo:
        source_repo = REPOSITORY
    elif environment['SOURCE_ARCHIVE_REPO']:
        source_channel = environment['SOURCE_ARCHIVE_REPO']
    elif not source_tag and '/' not in source_repo:
        # A bare word without a slash is a tag on the current repository,
        # not a repository name
        source_tag = source_repo
        source_repo = REPOSITORY

    # `channel` output: repo[@tag], or the literal 'stable' for the
    # official repository without an explicit tag
    resolved_source = source_repo
    if source_tag:
        resolved_source = f'{resolved_source}@{source_tag}'
    elif source_repo == STABLE_REPOSITORY:
        resolved_source = 'stable'

    revision = None
    if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
        # Time-based revision keeps prerelease/non-committed versions unique
        revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')

    version = calculate_version(INPUTS.get('version') or revision)

    target_repo = PROCESSED['target_repo']
    target_tag = PROCESSED['target_tag']
    if target_repo:
        if target_repo == 'stable':
            target_repo = STABLE_REPOSITORY
        if not target_tag:
            if target_repo == STABLE_REPOSITORY:
                target_tag = version
            elif environment['TARGET_ARCHIVE_REPO']:
                target_tag = source_tag or version
            else:
                # Bare word: treat it as a tag on the current repository
                target_tag = target_repo
                target_repo = REPOSITORY
        if target_repo != REPOSITORY:
            # Publishing to an archive repo requires its dedicated token
            target_repo = environment['TARGET_ARCHIVE_REPO']
            target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['TARGET_PYPI_PROJECT'] or None
            pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
    else:
        # No explicit target: derive it from the source
        target_tag = source_tag or version
        if source_channel:
            target_repo = source_channel
            target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
            pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
        else:
            target_repo = REPOSITORY

    if does_not_have_needed_token:
        # Fall back to the generic ARCHIVE_REPO_TOKEN before failing outright
        if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
            print(f'::error::Repository access secret {target_repo_token} not found')
            return None
        target_repo_token = 'ARCHIVE_REPO_TOKEN'

    if target_repo == REPOSITORY and not INPUTS['prerelease']:
        pypi_project = environment['PYPI_PROJECT'] or None

    return {
        'channel': resolved_source,
        'version': version,
        'target_repo': target_repo,
        'target_repo_token': target_repo_token,
        'target_tag': target_tag,
        'pypi_project': pypi_project,
        'pypi_suffix': pypi_suffix,
    }
|
||||||
|
|
||||||
|
|
||||||
|
def process_inputs(inputs):
    """Split the `source` and `target` inputs into `*_repo`/`*_tag` pairs.

    Each input may look like `repo`, `repo@tag`, or be absent; an absent or
    tag-less input yields empty strings for the missing parts.
    """
    parsed = {}
    for field in ('source', 'target'):
        repository, _sep, tag = inputs.get(field, '').partition('@')
        parsed[f'{field}_repo'] = repository
        parsed[f'{field}_tag'] = tag
    return parsed
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Outputs are communicated back to the workflow via the GITHUB_OUTPUT file
    if not os.getenv('GITHUB_OUTPUT'):
        print('This script is only intended for use with GitHub Actions', file=sys.stderr)
        sys.exit(1)

    if 'process_inputs' in sys.argv:
        # First invocation mode: only split the raw inputs into repo/tag pairs
        inputs = json.loads(os.environ['INPUTS'])
        print('::group::Inputs')
        print(json.dumps(inputs, indent=2))
        print('::endgroup::')
        outputs = process_inputs(inputs)
        print('::group::Processed')
        print(json.dumps(outputs, indent=2))
        print('::endgroup::')
        with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
            f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
        sys.exit(0)

    # Second invocation mode: compute the full set of release variables
    outputs = setup_variables(dict(os.environ))
    if not outputs:
        # setup_variables() already printed a ::error:: annotation
        sys.exit(1)

    print('::group::Output variables')
    print(json.dumps(outputs, indent=2))
    print('::endgroup::')

    with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
        # None values are written as empty strings for the workflow
        f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
|
||||||
324
devscripts/setup_variables_tests.py
Normal file
324
devscripts/setup_variables_tests.py
Normal file
@@ -0,0 +1,324 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
|
||||||
|
from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
|
||||||
|
from devscripts.utils import calculate_version
|
||||||
|
|
||||||
|
|
||||||
|
def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
    # Simulate one release.yml run: build the environment the workflow would
    # hand to setup_variables() and compare the result against `expected`.
    # With expected=None, the result is printed instead (useful when writing
    # new cases). `note` only labels assertion failures.
    inp = inputs.copy()
    inp.setdefault('linux_armv7l', True)
    inp.setdefault('prerelease', False)
    processed = process_inputs(inp)
    source_repo = processed['source_repo'].upper()
    target_repo = processed['target_repo'].upper()
    # Upper-case the names so they match the env-key construction below
    variables = {k.upper(): v for k, v in repo_vars.items()}
    secrets = {k.upper(): v for k, v in repo_secrets.items()}

    env = {
        # Keep this in sync with prepare.setup_variables in release.yml
        'INPUTS': json.dumps(inp),
        'PROCESSED': json.dumps(processed),
        'REPOSITORY': github_repository,
        'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
        'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
        'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
        'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
        'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
        'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
        'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
        'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
        'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
    }

    result = setup_variables(env)
    if not expected:
        print(' {\n' + '\n'.join(f' {k!r}: {v!r},' for k, v in result.items()) + '\n }')
        return

    exp = expected.copy()
    if ignore_revision:
        # The revision component is time-based; only compare the date portion
        assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
        version_is_tag = result['version'] == result['target_tag']
        for dct in (result, exp):
            dct['version'] = '.'.join(dct['version'].split('.')[:3])
            if version_is_tag:
                dct['target_tag'] = dct['version']
    assert result == exp, f'unexpected result: {github_repository} {note}'
|
||||||
|
|
||||||
|
|
||||||
|
def test_setup_variables():
    # Table-driven scenarios covering the official repository and forks,
    # with and without archive-repo variables/secrets configured.
    DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
    DEFAULT_VERSION = calculate_version()
    BASE_REPO_VARS = {
        'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
        'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
        'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
        'NIGHTLY_PYPI_SUFFIX': 'dev',
        'PUSH_VERSION_COMMIT': '1',
        'PYPI_PROJECT': 'yt-dlp',
    }
    BASE_REPO_SECRETS = {
        'ARCHIVE_REPO_TOKEN': '1',
    }
    FORK_REPOSITORY = 'fork/yt-dlp'
    FORK_ORG = FORK_REPOSITORY.partition('/')[0]

    # --- Official repository with full vars/secrets ---
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
            'channel': 'stable',
            'version': DEFAULT_VERSION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': None,
        })
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': f'{STABLE_REPOSITORY}@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': 'stable@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    # --- Fork without any vars/secrets ---
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
        {}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
        {}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
        {}, {}, {
            'prerelease': True,
            'source': 'nightly',
            'target': 'nightly',
        }, {
            'channel': f'{FORK_REPOSITORY}@nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'nightly',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, master',
        {}, {}, {
            'prerelease': True,
            'source': 'master',
            'target': 'master',
        }, {
            'channel': f'{FORK_REPOSITORY}@master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'master',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
        {}, {}, {'version': '123'}, {
            'channel': FORK_REPOSITORY,
            'version': f'{DEFAULT_VERSION[:10]}.123',
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': f'{DEFAULT_VERSION[:10]}.123',
            'pypi_project': None,
            'pypi_suffix': None,
        })

    # --- Fork with PUSH_VERSION_COMMIT only ---
    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': None,
            'pypi_suffix': None,
        })
    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    # --- Fork with its own archive repos and tokens ---
    _test(
        FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
            'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'PYPI_PROJECT': 'yt-dlp-test',
        }, {
            'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
            'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
            'MASTER_PYPI_PROJECT': 'yt-dlp-test',
            'MASTER_PYPI_SUFFIX': 'dev',
        }, {
            'MASTER_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-master-builds',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-master-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
            'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp-test',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)

    # --- Non-numeric tags ---
    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag',
        {}, {}, {'source': 'experimental'}, {
            'channel': f'{FORK_REPOSITORY}@experimental',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
        {}, {}, {
            'prerelease': True,
            'source': 'stable',
            'target': 'experimental',
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
|
||||||
@@ -9,24 +9,9 @@
|
|||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import contextlib
|
import contextlib
|
||||||
import datetime as dt
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from devscripts.utils import read_version, run_process, write_file
|
from devscripts.utils import calculate_version, run_process, write_file
|
||||||
|
|
||||||
|
|
||||||
def get_new_version(version, revision):
|
|
||||||
if not version:
|
|
||||||
version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
|
|
||||||
|
|
||||||
if revision:
|
|
||||||
assert revision.isdecimal(), 'Revision must be a number'
|
|
||||||
else:
|
|
||||||
old_version = read_version().split('.')
|
|
||||||
if version.split('.') == old_version[:3]:
|
|
||||||
revision = str(int(([*old_version, 0])[3]) + 1)
|
|
||||||
|
|
||||||
return f'{version}.{revision}' if revision else version
|
|
||||||
|
|
||||||
|
|
||||||
def get_git_head():
|
def get_git_head():
|
||||||
@@ -72,9 +57,7 @@ def get_git_head():
|
|||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
git_head = get_git_head()
|
git_head = get_git_head()
|
||||||
version = (
|
version = calculate_version(args.version)
|
||||||
args.version if args.version and '.' in args.version
|
|
||||||
else get_new_version(None, args.version))
|
|
||||||
write_file(args.output, VERSION_TEMPLATE.format(
|
write_file(args.output, VERSION_TEMPLATE.format(
|
||||||
version=version, git_head=git_head, channel=args.channel, origin=args.origin,
|
version=version, git_head=git_head, channel=args.channel, origin=args.origin,
|
||||||
package_version=f'{version}{args.suffix}'))
|
package_version=f'{version}{args.suffix}'))
|
||||||
|
|||||||
@@ -20,7 +20,9 @@
|
|||||||
'--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
|
'--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
|
||||||
help='path to the Changelog file')
|
help='path to the Changelog file')
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
new_entry = create_changelog(args)
|
|
||||||
|
|
||||||
header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
|
header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
|
||||||
write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
|
current_version = read_version()
|
||||||
|
if current_version != changelog.splitlines()[0]:
|
||||||
|
new_entry = create_changelog(args)
|
||||||
|
write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
import argparse
|
import argparse
|
||||||
|
import datetime as dt
|
||||||
import functools
|
import functools
|
||||||
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
|
|
||||||
@@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
|
|||||||
return items[varname]
|
return items[varname]
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_version(version=None, fname='yt_dlp/version.py'):
    """Compute the release version string.

    A *version* containing a dot is taken as a complete version and returned
    unchanged. Otherwise *version* is treated as an optional numeric revision
    appended to today's UTC date (YYYY.MM.DD). With no revision given, the
    version file at *fname* is consulted: if it already carries today's date,
    its revision counter is bumped so repeated same-day releases stay distinct.
    """
    if version and '.' in version:
        return version

    revision = version
    today = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')

    if revision:
        assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
        return f'{today}.{revision}'

    previous = read_version(fname=fname).split('.')
    if today.split('.') == previous[:3]:
        # Same calendar day as the last release: bump (or start) the revision
        revision = str(int(([*previous, 0])[3]) + 1)

    return f'{today}.{revision}' if revision else today
|
||||||
|
|
||||||
|
|
||||||
def get_filename_args(has_infile=False, default_outfile=None):
|
def get_filename_args(has_infile=False, default_outfile=None):
|
||||||
parser = argparse.ArgumentParser()
|
parser = argparse.ArgumentParser()
|
||||||
if has_infile:
|
if has_infile:
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
[build-system]
|
[build-system]
|
||||||
requires = ["hatchling"]
|
requires = ["hatchling>=1.27.0"]
|
||||||
build-backend = "hatchling.build"
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
@@ -15,14 +15,15 @@ description = "A feature-rich command-line audio/video downloader"
|
|||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.9"
|
requires-python = ">=3.9"
|
||||||
keywords = [
|
keywords = [
|
||||||
|
"cli",
|
||||||
|
"downloader",
|
||||||
"youtube-dl",
|
"youtube-dl",
|
||||||
"video-downloader",
|
|
||||||
"youtube-downloader",
|
"youtube-downloader",
|
||||||
"sponsorblock",
|
"sponsorblock",
|
||||||
"youtube-dlc",
|
|
||||||
"yt-dlp",
|
"yt-dlp",
|
||||||
]
|
]
|
||||||
license = {file = "LICENSE"}
|
license = "Unlicense"
|
||||||
|
license-files = ["LICENSE"]
|
||||||
classifiers = [
|
classifiers = [
|
||||||
"Topic :: Multimedia :: Video",
|
"Topic :: Multimedia :: Video",
|
||||||
"Development Status :: 5 - Production/Stable",
|
"Development Status :: 5 - Production/Stable",
|
||||||
@@ -34,10 +35,10 @@ classifiers = [
|
|||||||
"Programming Language :: Python :: 3.11",
|
"Programming Language :: Python :: 3.11",
|
||||||
"Programming Language :: Python :: 3.12",
|
"Programming Language :: Python :: 3.12",
|
||||||
"Programming Language :: Python :: 3.13",
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Programming Language :: Python :: 3.14",
|
||||||
"Programming Language :: Python :: Implementation",
|
"Programming Language :: Python :: Implementation",
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
"Programming Language :: Python :: Implementation :: PyPy",
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
"License :: OSI Approved :: The Unlicense (Unlicense)",
|
|
||||||
"Operating System :: OS Independent",
|
"Operating System :: OS Independent",
|
||||||
]
|
]
|
||||||
dynamic = ["version"]
|
dynamic = ["version"]
|
||||||
@@ -51,11 +52,11 @@ default = [
|
|||||||
"mutagen",
|
"mutagen",
|
||||||
"pycryptodomex",
|
"pycryptodomex",
|
||||||
"requests>=2.32.2,<3",
|
"requests>=2.32.2,<3",
|
||||||
"urllib3>=1.26.17,<3",
|
"urllib3>=2.0.2,<3",
|
||||||
"websockets>=13.0",
|
"websockets>=13.0",
|
||||||
]
|
]
|
||||||
curl-cffi = [
|
curl-cffi = [
|
||||||
"curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.11; implementation_name=='cpython'",
|
"curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.14; implementation_name=='cpython'",
|
||||||
]
|
]
|
||||||
secretstorage = [
|
secretstorage = [
|
||||||
"cffi",
|
"cffi",
|
||||||
@@ -63,9 +64,9 @@ secretstorage = [
|
|||||||
]
|
]
|
||||||
build = [
|
build = [
|
||||||
"build",
|
"build",
|
||||||
"hatchling",
|
"hatchling>=1.27.0",
|
||||||
"pip",
|
"pip",
|
||||||
"setuptools>=71.0.2", # 71.0.0 broke pyinstaller
|
"setuptools>=71.0.2,<81", # See https://github.com/pyinstaller/pyinstaller/issues/9149
|
||||||
"wheel",
|
"wheel",
|
||||||
]
|
]
|
||||||
dev = [
|
dev = [
|
||||||
@@ -75,14 +76,14 @@ dev = [
|
|||||||
]
|
]
|
||||||
static-analysis = [
|
static-analysis = [
|
||||||
"autopep8~=2.0",
|
"autopep8~=2.0",
|
||||||
"ruff~=0.11.0",
|
"ruff~=0.13.0",
|
||||||
]
|
]
|
||||||
test = [
|
test = [
|
||||||
"pytest~=8.1",
|
"pytest~=8.1",
|
||||||
"pytest-rerunfailures~=14.0",
|
"pytest-rerunfailures~=14.0",
|
||||||
]
|
]
|
||||||
pyinstaller = [
|
pyinstaller = [
|
||||||
"pyinstaller>=6.11.1", # Windows temp cleanup fixed in 6.11.1
|
"pyinstaller>=6.13.0", # Windows temp cleanup fixed in 6.13.0
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
@@ -107,7 +108,6 @@ include = [
|
|||||||
"/LICENSE", # included as license
|
"/LICENSE", # included as license
|
||||||
"/pyproject.toml", # included by default
|
"/pyproject.toml", # included by default
|
||||||
"/README.md", # included as readme
|
"/README.md", # included as readme
|
||||||
"/setup.cfg",
|
|
||||||
"/supportedsites.md",
|
"/supportedsites.md",
|
||||||
]
|
]
|
||||||
artifacts = [
|
artifacts = [
|
||||||
@@ -173,7 +173,8 @@ python = [
|
|||||||
"3.11",
|
"3.11",
|
||||||
"3.12",
|
"3.12",
|
||||||
"3.13",
|
"3.13",
|
||||||
"pypy3.10",
|
"3.14",
|
||||||
|
"pypy3.11",
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
@@ -210,10 +211,12 @@ ignore = [
|
|||||||
"TD001", # invalid-todo-tag
|
"TD001", # invalid-todo-tag
|
||||||
"TD002", # missing-todo-author
|
"TD002", # missing-todo-author
|
||||||
"TD003", # missing-todo-link
|
"TD003", # missing-todo-link
|
||||||
|
"PLC0415", # import-outside-top-level
|
||||||
"PLE0604", # invalid-all-object (false positives)
|
"PLE0604", # invalid-all-object (false positives)
|
||||||
"PLE0643", # potential-index-error (false positives)
|
"PLE0643", # potential-index-error (false positives)
|
||||||
"PLW0603", # global-statement
|
"PLW0603", # global-statement
|
||||||
"PLW1510", # subprocess-run-without-check
|
"PLW1510", # subprocess-run-without-check
|
||||||
|
"PLW1641", # eq-without-hash
|
||||||
"PLW2901", # redefined-loop-name
|
"PLW2901", # redefined-loop-name
|
||||||
"RUF001", # ambiguous-unicode-character-string
|
"RUF001", # ambiguous-unicode-character-string
|
||||||
"RUF012", # mutable-class-default
|
"RUF012", # mutable-class-default
|
||||||
@@ -313,6 +316,7 @@ banned-from = [
|
|||||||
"yt_dlp.utils.error_to_compat_str".msg = "Use `str` instead."
|
"yt_dlp.utils.error_to_compat_str".msg = "Use `str` instead."
|
||||||
"yt_dlp.utils.bytes_to_intlist".msg = "Use `list` instead."
|
"yt_dlp.utils.bytes_to_intlist".msg = "Use `list` instead."
|
||||||
"yt_dlp.utils.intlist_to_bytes".msg = "Use `bytes` instead."
|
"yt_dlp.utils.intlist_to_bytes".msg = "Use `bytes` instead."
|
||||||
|
"yt_dlp.utils.jwt_encode_hs256".msg = "Use `yt_dlp.utils.jwt_encode` instead."
|
||||||
"yt_dlp.utils.decodeArgument".msg = "Do not use"
|
"yt_dlp.utils.decodeArgument".msg = "Do not use"
|
||||||
"yt_dlp.utils.decodeFilename".msg = "Do not use"
|
"yt_dlp.utils.decodeFilename".msg = "Do not use"
|
||||||
"yt_dlp.utils.encodeFilename".msg = "Do not use"
|
"yt_dlp.utils.encodeFilename".msg = "Do not use"
|
||||||
|
|||||||
39
setup.cfg
39
setup.cfg
@@ -1,39 +0,0 @@
|
|||||||
[flake8]
|
|
||||||
exclude = build,venv,.tox,.git,.pytest_cache
|
|
||||||
ignore = E402,E501,E731,E741,W503
|
|
||||||
max_line_length = 120
|
|
||||||
per_file_ignores =
|
|
||||||
devscripts/lazy_load_template.py: F401
|
|
||||||
|
|
||||||
|
|
||||||
[autoflake]
|
|
||||||
ignore-init-module-imports = true
|
|
||||||
ignore-pass-after-docstring = true
|
|
||||||
remove-all-unused-imports = true
|
|
||||||
remove-duplicate-keys = true
|
|
||||||
remove-unused-variables = true
|
|
||||||
|
|
||||||
|
|
||||||
[tox:tox]
|
|
||||||
skipsdist = true
|
|
||||||
envlist = py{39,310,311,312,313},pypy310
|
|
||||||
skip_missing_interpreters = true
|
|
||||||
|
|
||||||
[testenv] # tox
|
|
||||||
deps =
|
|
||||||
pytest
|
|
||||||
commands = pytest {posargs:"-m not download"}
|
|
||||||
passenv = HOME # For test_compat_expanduser
|
|
||||||
setenv =
|
|
||||||
# PYTHONWARNINGS = error # Catches PIP's warnings too
|
|
||||||
|
|
||||||
|
|
||||||
[isort]
|
|
||||||
py_version = 39
|
|
||||||
multi_line_output = VERTICAL_HANGING_INDENT
|
|
||||||
line_length = 80
|
|
||||||
reverse_relative = true
|
|
||||||
ensure_newline_before_comments = true
|
|
||||||
include_trailing_comma = true
|
|
||||||
known_first_party =
|
|
||||||
test
|
|
||||||
@@ -5,12 +5,14 @@ # Supported sites
|
|||||||
Not all sites listed here are guaranteed to work; websites are constantly changing and sometimes this breaks yt-dlp's support for them.
|
Not all sites listed here are guaranteed to work; websites are constantly changing and sometimes this breaks yt-dlp's support for them.
|
||||||
The only reliable way to check if a site is supported is to try it.
|
The only reliable way to check if a site is supported is to try it.
|
||||||
|
|
||||||
|
- **10play**: [*10play*](## "netrc machine")
|
||||||
|
- **10play:season**
|
||||||
- **17live**
|
- **17live**
|
||||||
- **17live:clip**
|
- **17live:clip**
|
||||||
- **17live:vod**
|
- **17live:vod**
|
||||||
- **1News**: 1news.co.nz article videos
|
- **1News**: 1news.co.nz article videos
|
||||||
- **1tv**: Первый канал
|
- **1tv**: Первый канал
|
||||||
- **20min**
|
- **20min**: (**Currently broken**)
|
||||||
- **23video**
|
- **23video**
|
||||||
- **247sports**: (**Currently broken**)
|
- **247sports**: (**Currently broken**)
|
||||||
- **24tv.ua**
|
- **24tv.ua**
|
||||||
@@ -18,7 +20,6 @@ # Supported sites
|
|||||||
- **3sat**
|
- **3sat**
|
||||||
- **4tube**
|
- **4tube**
|
||||||
- **56.com**
|
- **56.com**
|
||||||
- **6play**
|
|
||||||
- **7plus**
|
- **7plus**
|
||||||
- **8tracks**
|
- **8tracks**
|
||||||
- **9c9media**
|
- **9c9media**
|
||||||
@@ -43,10 +44,6 @@ # Supported sites
|
|||||||
- **ADNSeason**: [*animationdigitalnetwork*](## "netrc machine") Animation Digital Network
|
- **ADNSeason**: [*animationdigitalnetwork*](## "netrc machine") Animation Digital Network
|
||||||
- **AdobeConnect**
|
- **AdobeConnect**
|
||||||
- **adobetv**
|
- **adobetv**
|
||||||
- **adobetv:channel**
|
|
||||||
- **adobetv:embed**
|
|
||||||
- **adobetv:show**
|
|
||||||
- **adobetv:video**
|
|
||||||
- **AdultSwim**
|
- **AdultSwim**
|
||||||
- **aenetworks**: A+E Networks: A&E, Lifetime, History.com, FYI Network and History Vault
|
- **aenetworks**: A+E Networks: A&E, Lifetime, History.com, FYI Network and History Vault
|
||||||
- **aenetworks:collection**
|
- **aenetworks:collection**
|
||||||
@@ -98,7 +95,6 @@ # Supported sites
|
|||||||
- **ARD**
|
- **ARD**
|
||||||
- **ARDMediathek**
|
- **ARDMediathek**
|
||||||
- **ARDMediathekCollection**
|
- **ARDMediathekCollection**
|
||||||
- **Arkena**
|
|
||||||
- **Art19**
|
- **Art19**
|
||||||
- **Art19Show**
|
- **Art19Show**
|
||||||
- **arte.sky.it**
|
- **arte.sky.it**
|
||||||
@@ -131,7 +127,6 @@ # Supported sites
|
|||||||
- **BaiduVideo**: 百度视频
|
- **BaiduVideo**: 百度视频
|
||||||
- **BanBye**
|
- **BanBye**
|
||||||
- **BanByeChannel**
|
- **BanByeChannel**
|
||||||
- **bandaichannel**
|
|
||||||
- **Bandcamp**
|
- **Bandcamp**
|
||||||
- **Bandcamp:album**
|
- **Bandcamp:album**
|
||||||
- **Bandcamp:user**
|
- **Bandcamp:user**
|
||||||
@@ -154,10 +149,8 @@ # Supported sites
|
|||||||
- **Beatport**
|
- **Beatport**
|
||||||
- **Beeg**
|
- **Beeg**
|
||||||
- **BehindKink**: (**Currently broken**)
|
- **BehindKink**: (**Currently broken**)
|
||||||
- **Bellator**
|
|
||||||
- **BellMedia**
|
|
||||||
- **BerufeTV**
|
- **BerufeTV**
|
||||||
- **Bet**: (**Currently broken**)
|
- **Bet**
|
||||||
- **bfi:player**: (**Currently broken**)
|
- **bfi:player**: (**Currently broken**)
|
||||||
- **bfmtv**
|
- **bfmtv**
|
||||||
- **bfmtv:article**
|
- **bfmtv:article**
|
||||||
@@ -195,6 +188,7 @@ # Supported sites
|
|||||||
- **BitChute**
|
- **BitChute**
|
||||||
- **BitChuteChannel**
|
- **BitChuteChannel**
|
||||||
- **BlackboardCollaborate**
|
- **BlackboardCollaborate**
|
||||||
|
- **BlackboardCollaborateLaunch**
|
||||||
- **BleacherReport**: (**Currently broken**)
|
- **BleacherReport**: (**Currently broken**)
|
||||||
- **BleacherReportCMS**: (**Currently broken**)
|
- **BleacherReportCMS**: (**Currently broken**)
|
||||||
- **blerp**
|
- **blerp**
|
||||||
@@ -223,6 +217,7 @@ # Supported sites
|
|||||||
- **Brilliantpala:Elearn**: [*brilliantpala*](## "netrc machine") VoD on elearn.brilliantpala.org
|
- **Brilliantpala:Elearn**: [*brilliantpala*](## "netrc machine") VoD on elearn.brilliantpala.org
|
||||||
- **bt:article**: Bergens Tidende Articles
|
- **bt:article**: Bergens Tidende Articles
|
||||||
- **bt:vestlendingen**: Bergens Tidende - Vestlendingen
|
- **bt:vestlendingen**: Bergens Tidende - Vestlendingen
|
||||||
|
- **BTVPlus**
|
||||||
- **Bundesliga**
|
- **Bundesliga**
|
||||||
- **Bundestag**
|
- **Bundestag**
|
||||||
- **BunnyCdn**
|
- **BunnyCdn**
|
||||||
@@ -246,7 +241,6 @@ # Supported sites
|
|||||||
- **Canalplus**: mycanal.fr and piwiplus.fr
|
- **Canalplus**: mycanal.fr and piwiplus.fr
|
||||||
- **Canalsurmas**
|
- **Canalsurmas**
|
||||||
- **CaracolTvPlay**: [*caracoltv-play*](## "netrc machine")
|
- **CaracolTvPlay**: [*caracoltv-play*](## "netrc machine")
|
||||||
- **CartoonNetwork**
|
|
||||||
- **cbc.ca**
|
- **cbc.ca**
|
||||||
- **cbc.ca:player**
|
- **cbc.ca:player**
|
||||||
- **cbc.ca:player:playlist**
|
- **cbc.ca:player:playlist**
|
||||||
@@ -284,19 +278,16 @@ # Supported sites
|
|||||||
- **Clipchamp**
|
- **Clipchamp**
|
||||||
- **Clippit**
|
- **Clippit**
|
||||||
- **ClipRs**: (**Currently broken**)
|
- **ClipRs**: (**Currently broken**)
|
||||||
- **ClipYouEmbed**
|
|
||||||
- **CloserToTruth**: (**Currently broken**)
|
- **CloserToTruth**: (**Currently broken**)
|
||||||
- **CloudflareStream**
|
- **CloudflareStream**
|
||||||
- **CloudyCDN**
|
- **CloudyCDN**
|
||||||
- **Clubic**: (**Currently broken**)
|
- **Clubic**: (**Currently broken**)
|
||||||
- **Clyp**
|
- **Clyp**
|
||||||
- **cmt.com**: (**Currently broken**)
|
|
||||||
- **CNBCVideo**
|
- **CNBCVideo**
|
||||||
- **CNN**
|
- **CNN**
|
||||||
- **CNNIndonesia**
|
- **CNNIndonesia**
|
||||||
- **ComedyCentral**
|
- **ComedyCentral**
|
||||||
- **ComedyCentralTV**
|
- **ConanClassic**: (**Currently broken**)
|
||||||
- **ConanClassic**
|
|
||||||
- **CondeNast**: Condé Nast media group: Allure, Architectural Digest, Ars Technica, Bon Appétit, Brides, Condé Nast, Condé Nast Traveler, Details, Epicurious, GQ, Glamour, Golf Digest, SELF, Teen Vogue, The New Yorker, Vanity Fair, Vogue, W Magazine, WIRED
|
- **CondeNast**: Condé Nast media group: Allure, Architectural Digest, Ars Technica, Bon Appétit, Brides, Condé Nast, Condé Nast Traveler, Details, Epicurious, GQ, Glamour, Golf Digest, SELF, Teen Vogue, The New Yorker, Vanity Fair, Vogue, W Magazine, WIRED
|
||||||
- **CONtv**
|
- **CONtv**
|
||||||
- **CookingChannel**
|
- **CookingChannel**
|
||||||
@@ -307,7 +298,6 @@ # Supported sites
|
|||||||
- **cpac**
|
- **cpac**
|
||||||
- **cpac:playlist**
|
- **cpac:playlist**
|
||||||
- **Cracked**
|
- **Cracked**
|
||||||
- **Crackle**
|
|
||||||
- **Craftsy**
|
- **Craftsy**
|
||||||
- **CrooksAndLiars**
|
- **CrooksAndLiars**
|
||||||
- **CrowdBunker**
|
- **CrowdBunker**
|
||||||
@@ -316,15 +306,12 @@ # Supported sites
|
|||||||
- **CSpan**: C-SPAN
|
- **CSpan**: C-SPAN
|
||||||
- **CSpanCongress**
|
- **CSpanCongress**
|
||||||
- **CtsNews**: 華視新聞
|
- **CtsNews**: 華視新聞
|
||||||
- **CTV**
|
|
||||||
- **CTVNews**
|
- **CTVNews**
|
||||||
- **cu.ntv.co.jp**: Nippon Television Network
|
- **cu.ntv.co.jp**: 日テレ無料TADA!
|
||||||
- **CultureUnplugged**
|
- **CultureUnplugged**
|
||||||
- **curiositystream**: [*curiositystream*](## "netrc machine")
|
- **curiositystream**: [*curiositystream*](## "netrc machine")
|
||||||
- **curiositystream:collections**: [*curiositystream*](## "netrc machine")
|
- **curiositystream:collections**: [*curiositystream*](## "netrc machine")
|
||||||
- **curiositystream:series**: [*curiositystream*](## "netrc machine")
|
- **curiositystream:series**: [*curiositystream*](## "netrc machine")
|
||||||
- **cwtv**
|
|
||||||
- **cwtv:movie**
|
|
||||||
- **Cybrary**: [*cybrary*](## "netrc machine")
|
- **Cybrary**: [*cybrary*](## "netrc machine")
|
||||||
- **CybraryCourse**: [*cybrary*](## "netrc machine")
|
- **CybraryCourse**: [*cybrary*](## "netrc machine")
|
||||||
- **DacastPlaylist**
|
- **DacastPlaylist**
|
||||||
@@ -396,7 +383,6 @@ # Supported sites
|
|||||||
- **dw:article**: (**Currently broken**)
|
- **dw:article**: (**Currently broken**)
|
||||||
- **dzen.ru**: Дзен (dzen) formerly Яндекс.Дзен (Yandex Zen)
|
- **dzen.ru**: Дзен (dzen) formerly Яндекс.Дзен (Yandex Zen)
|
||||||
- **dzen.ru:channel**
|
- **dzen.ru:channel**
|
||||||
- **EaglePlatform**
|
|
||||||
- **EbaumsWorld**
|
- **EbaumsWorld**
|
||||||
- **Ebay**
|
- **Ebay**
|
||||||
- **egghead:course**: egghead.io course
|
- **egghead:course**: egghead.io course
|
||||||
@@ -447,6 +433,8 @@ # Supported sites
|
|||||||
- **fancode:live**: [*fancode*](## "netrc machine") (**Currently broken**)
|
- **fancode:live**: [*fancode*](## "netrc machine") (**Currently broken**)
|
||||||
- **fancode:vod**: [*fancode*](## "netrc machine") (**Currently broken**)
|
- **fancode:vod**: [*fancode*](## "netrc machine") (**Currently broken**)
|
||||||
- **Fathom**
|
- **Fathom**
|
||||||
|
- **Faulio**
|
||||||
|
- **FaulioLive**
|
||||||
- **faz.net**
|
- **faz.net**
|
||||||
- **fc2**: [*fc2*](## "netrc machine")
|
- **fc2**: [*fc2*](## "netrc machine")
|
||||||
- **fc2:embed**
|
- **fc2:embed**
|
||||||
@@ -458,7 +446,6 @@ # Supported sites
|
|||||||
- **Filmweb**
|
- **Filmweb**
|
||||||
- **FiveThirtyEight**
|
- **FiveThirtyEight**
|
||||||
- **FiveTV**
|
- **FiveTV**
|
||||||
- **FlexTV**
|
|
||||||
- **Flickr**
|
- **Flickr**
|
||||||
- **Floatplane**
|
- **Floatplane**
|
||||||
- **FloatplaneChannel**
|
- **FloatplaneChannel**
|
||||||
@@ -574,9 +561,7 @@ # Supported sites
|
|||||||
- **HollywoodReporterPlaylist**
|
- **HollywoodReporterPlaylist**
|
||||||
- **Holodex**
|
- **Holodex**
|
||||||
- **HotNewHipHop**: (**Currently broken**)
|
- **HotNewHipHop**: (**Currently broken**)
|
||||||
- **hotstar**
|
- **hotstar**: JioHotstar
|
||||||
- **hotstar:playlist**
|
|
||||||
- **hotstar:season**
|
|
||||||
- **hotstar:series**
|
- **hotstar:series**
|
||||||
- **hrfernsehen**
|
- **hrfernsehen**
|
||||||
- **HRTi**: [*hrti*](## "netrc machine")
|
- **HRTi**: [*hrti*](## "netrc machine")
|
||||||
@@ -589,7 +574,7 @@ # Supported sites
|
|||||||
- **Hungama**
|
- **Hungama**
|
||||||
- **HungamaAlbumPlaylist**
|
- **HungamaAlbumPlaylist**
|
||||||
- **HungamaSong**
|
- **HungamaSong**
|
||||||
- **huya:live**: huya.com
|
- **huya:live**: 虎牙直播
|
||||||
- **huya:video**: 虎牙视频
|
- **huya:video**: 虎牙视频
|
||||||
- **Hypem**
|
- **Hypem**
|
||||||
- **Hytale**
|
- **Hytale**
|
||||||
@@ -646,13 +631,13 @@ # Supported sites
|
|||||||
- **Jamendo**
|
- **Jamendo**
|
||||||
- **JamendoAlbum**
|
- **JamendoAlbum**
|
||||||
- **JeuxVideo**: (**Currently broken**)
|
- **JeuxVideo**: (**Currently broken**)
|
||||||
- **jiocinema**: [*jiocinema*](## "netrc machine")
|
|
||||||
- **jiocinema:series**: [*jiocinema*](## "netrc machine")
|
|
||||||
- **jiosaavn:album**
|
- **jiosaavn:album**
|
||||||
|
- **jiosaavn:artist**
|
||||||
- **jiosaavn:playlist**
|
- **jiosaavn:playlist**
|
||||||
|
- **jiosaavn:show**
|
||||||
|
- **jiosaavn:show:playlist**
|
||||||
- **jiosaavn:song**
|
- **jiosaavn:song**
|
||||||
- **Joj**
|
- **Joj**
|
||||||
- **JoqrAg**: 超!A&G+ 文化放送 (f.k.a. AGQR) Nippon Cultural Broadcasting, Inc. (JOQR)
|
|
||||||
- **Jove**
|
- **Jove**
|
||||||
- **JStream**
|
- **JStream**
|
||||||
- **JTBC**: jtbc.co.kr
|
- **JTBC**: jtbc.co.kr
|
||||||
@@ -703,8 +688,8 @@ # Supported sites
|
|||||||
- **lbry:channel**: odysee.com channels
|
- **lbry:channel**: odysee.com channels
|
||||||
- **lbry:playlist**: odysee.com playlists
|
- **lbry:playlist**: odysee.com playlists
|
||||||
- **LCI**
|
- **LCI**
|
||||||
- **Lcp**
|
- **Lcp**: (**Currently broken**)
|
||||||
- **LcpPlay**
|
- **LcpPlay**: (**Currently broken**)
|
||||||
- **Le**: 乐视网
|
- **Le**: 乐视网
|
||||||
- **LearningOnScreen**
|
- **LearningOnScreen**
|
||||||
- **Lecture2Go**: (**Currently broken**)
|
- **Lecture2Go**: (**Currently broken**)
|
||||||
@@ -723,9 +708,6 @@ # Supported sites
|
|||||||
- **life:embed**
|
- **life:embed**
|
||||||
- **likee**
|
- **likee**
|
||||||
- **likee:user**
|
- **likee:user**
|
||||||
- **limelight**
|
|
||||||
- **limelight:channel**
|
|
||||||
- **limelight:channel_list**
|
|
||||||
- **LinkedIn**: [*linkedin*](## "netrc machine")
|
- **LinkedIn**: [*linkedin*](## "netrc machine")
|
||||||
- **linkedin:events**: [*linkedin*](## "netrc machine")
|
- **linkedin:events**: [*linkedin*](## "netrc machine")
|
||||||
- **linkedin:learning**: [*linkedin*](## "netrc machine")
|
- **linkedin:learning**: [*linkedin*](## "netrc machine")
|
||||||
@@ -733,7 +715,7 @@ # Supported sites
|
|||||||
- **Liputan6**
|
- **Liputan6**
|
||||||
- **ListenNotes**
|
- **ListenNotes**
|
||||||
- **LiTV**
|
- **LiTV**
|
||||||
- **LiveJournal**
|
- **LiveJournal**: (**Currently broken**)
|
||||||
- **livestream**
|
- **livestream**
|
||||||
- **livestream:original**
|
- **livestream:original**
|
||||||
- **Livestreamfails**
|
- **Livestreamfails**
|
||||||
@@ -772,6 +754,7 @@ # Supported sites
|
|||||||
- **massengeschmack.tv**
|
- **massengeschmack.tv**
|
||||||
- **Masters**
|
- **Masters**
|
||||||
- **MatchTV**
|
- **MatchTV**
|
||||||
|
- **Mave**
|
||||||
- **MBN**: mbn.co.kr (매일방송)
|
- **MBN**: mbn.co.kr (매일방송)
|
||||||
- **MDR**: MDR.DE
|
- **MDR**: MDR.DE
|
||||||
- **MedalTV**
|
- **MedalTV**
|
||||||
@@ -806,16 +789,18 @@ # Supported sites
|
|||||||
- **minds:channel**
|
- **minds:channel**
|
||||||
- **minds:group**
|
- **minds:group**
|
||||||
- **Minoto**
|
- **Minoto**
|
||||||
|
- **mir24.tv**
|
||||||
- **mirrativ**
|
- **mirrativ**
|
||||||
- **mirrativ:user**
|
- **mirrativ:user**
|
||||||
- **MirrorCoUK**
|
- **MirrorCoUK**
|
||||||
- **MiTele**: mitele.es
|
|
||||||
- **mixch**
|
- **mixch**
|
||||||
- **mixch:archive**
|
- **mixch:archive**
|
||||||
- **mixch:movie**
|
- **mixch:movie**
|
||||||
- **mixcloud**
|
- **mixcloud**
|
||||||
- **mixcloud:playlist**
|
- **mixcloud:playlist**
|
||||||
- **mixcloud:user**
|
- **mixcloud:user**
|
||||||
|
- **Mixlr**
|
||||||
|
- **MixlrRecoring**
|
||||||
- **MLB**
|
- **MLB**
|
||||||
- **MLBArticle**
|
- **MLBArticle**
|
||||||
- **MLBTV**: [*mlb*](## "netrc machine")
|
- **MLBTV**: [*mlb*](## "netrc machine")
|
||||||
@@ -828,7 +813,7 @@ # Supported sites
|
|||||||
- **Mojevideo**: mojevideo.sk
|
- **Mojevideo**: mojevideo.sk
|
||||||
- **Mojvideo**
|
- **Mojvideo**
|
||||||
- **Monstercat**
|
- **Monstercat**
|
||||||
- **MonsterSirenHypergryphMusic**
|
- **monstersiren**: 塞壬唱片
|
||||||
- **Motherless**
|
- **Motherless**
|
||||||
- **MotherlessGallery**
|
- **MotherlessGallery**
|
||||||
- **MotherlessGroup**
|
- **MotherlessGroup**
|
||||||
@@ -842,12 +827,6 @@ # Supported sites
|
|||||||
- **MSN**
|
- **MSN**
|
||||||
- **mtg**: MTG services
|
- **mtg**: MTG services
|
||||||
- **mtv**
|
- **mtv**
|
||||||
- **mtv.de**: (**Currently broken**)
|
|
||||||
- **mtv.it**
|
|
||||||
- **mtv.it:programma**
|
|
||||||
- **mtv:video**
|
|
||||||
- **mtvjapan**
|
|
||||||
- **mtvservices:embedded**
|
|
||||||
- **MTVUutisetArticle**: (**Currently broken**)
|
- **MTVUutisetArticle**: (**Currently broken**)
|
||||||
- **MuenchenTV**: münchen.tv (**Currently broken**)
|
- **MuenchenTV**: münchen.tv (**Currently broken**)
|
||||||
- **MujRozhlas**
|
- **MujRozhlas**
|
||||||
@@ -880,19 +859,19 @@ # Supported sites
|
|||||||
- **Naver**
|
- **Naver**
|
||||||
- **Naver:live**
|
- **Naver:live**
|
||||||
- **navernow**
|
- **navernow**
|
||||||
- **nba**
|
- **nba**: (**Currently broken**)
|
||||||
- **nba:channel**
|
- **nba:channel**: (**Currently broken**)
|
||||||
- **nba:embed**
|
- **nba:embed**: (**Currently broken**)
|
||||||
- **nba:watch**
|
- **nba:watch**: (**Currently broken**)
|
||||||
- **nba:watch:collection**
|
- **nba:watch:collection**: (**Currently broken**)
|
||||||
- **nba:watch:embed**
|
- **nba:watch:embed**: (**Currently broken**)
|
||||||
- **NBC**
|
- **NBC**
|
||||||
- **NBCNews**
|
- **NBCNews**
|
||||||
- **nbcolympics**
|
- **nbcolympics**
|
||||||
- **nbcolympics:stream**
|
- **nbcolympics:stream**: (**Currently broken**)
|
||||||
- **NBCSports**
|
- **NBCSports**: (**Currently broken**)
|
||||||
- **NBCSportsStream**
|
- **NBCSportsStream**: (**Currently broken**)
|
||||||
- **NBCSportsVPlayer**
|
- **NBCSportsVPlayer**: (**Currently broken**)
|
||||||
- **NBCStations**
|
- **NBCStations**
|
||||||
- **ndr**: NDR.de - Norddeutscher Rundfunk
|
- **ndr**: NDR.de - Norddeutscher Rundfunk
|
||||||
- **ndr:embed**
|
- **ndr:embed**
|
||||||
@@ -947,9 +926,6 @@ # Supported sites
|
|||||||
- **NhkVodProgram**
|
- **NhkVodProgram**
|
||||||
- **nhl.com**
|
- **nhl.com**
|
||||||
- **nick.com**
|
- **nick.com**
|
||||||
- **nick.de**
|
|
||||||
- **nickelodeon:br**
|
|
||||||
- **nickelodeonru**
|
|
||||||
- **niconico**: [*niconico*](## "netrc machine") ニコニコ動画
|
- **niconico**: [*niconico*](## "netrc machine") ニコニコ動画
|
||||||
- **niconico:history**: NicoNico user history or likes. Requires cookies.
|
- **niconico:history**: NicoNico user history or likes. Requires cookies.
|
||||||
- **niconico:live**: [*niconico*](## "netrc machine") ニコニコ生放送
|
- **niconico:live**: [*niconico*](## "netrc machine") ニコニコ生放送
|
||||||
@@ -968,11 +944,10 @@ # Supported sites
|
|||||||
- **Nitter**
|
- **Nitter**
|
||||||
- **njoy**: N-JOY
|
- **njoy**: N-JOY
|
||||||
- **njoy:embed**
|
- **njoy:embed**
|
||||||
- **NobelPrize**: (**Currently broken**)
|
- **NobelPrize**
|
||||||
- **NoicePodcast**
|
- **NoicePodcast**
|
||||||
- **NonkTube**
|
- **NonkTube**
|
||||||
- **NoodleMagazine**
|
- **NoodleMagazine**
|
||||||
- **Noovo**
|
|
||||||
- **NOSNLArticle**
|
- **NOSNLArticle**
|
||||||
- **Nova**: TN.cz, Prásk.tv, Nova.cz, Novaplus.cz, FANDA.tv, Krásná.cz and Doma.cz
|
- **Nova**: TN.cz, Prásk.tv, Nova.cz, Novaplus.cz, FANDA.tv, Krásná.cz and Doma.cz
|
||||||
- **NovaEmbed**
|
- **NovaEmbed**
|
||||||
@@ -1028,6 +1003,7 @@ # Supported sites
|
|||||||
- **onet.tv:channel**
|
- **onet.tv:channel**
|
||||||
- **OnetMVP**
|
- **OnetMVP**
|
||||||
- **OnionStudios**
|
- **OnionStudios**
|
||||||
|
- **onsen**: [*onsen*](## "netrc machine") インターネットラジオステーション<音泉>
|
||||||
- **Opencast**
|
- **Opencast**
|
||||||
- **OpencastPlaylist**
|
- **OpencastPlaylist**
|
||||||
- **openrec**
|
- **openrec**
|
||||||
@@ -1052,13 +1028,10 @@ # Supported sites
|
|||||||
- **Panopto**
|
- **Panopto**
|
||||||
- **PanoptoList**
|
- **PanoptoList**
|
||||||
- **PanoptoPlaylist**
|
- **PanoptoPlaylist**
|
||||||
- **ParamountNetwork**
|
|
||||||
- **ParamountPlus**
|
|
||||||
- **ParamountPlusSeries**
|
|
||||||
- **ParamountPressExpress**
|
- **ParamountPressExpress**
|
||||||
- **Parler**: Posts on parler.com
|
- **Parler**: Posts on parler.com
|
||||||
- **parliamentlive.tv**: UK parliament videos
|
- **parliamentlive.tv**: UK parliament videos
|
||||||
- **Parlview**: (**Currently broken**)
|
- **Parlview**
|
||||||
- **parti:livestream**
|
- **parti:livestream**
|
||||||
- **parti:video**
|
- **parti:video**
|
||||||
- **patreon**
|
- **patreon**
|
||||||
@@ -1081,21 +1054,19 @@ # Supported sites
|
|||||||
- **Photobucket**
|
- **Photobucket**
|
||||||
- **PiaLive**
|
- **PiaLive**
|
||||||
- **Piapro**: [*piapro*](## "netrc machine")
|
- **Piapro**: [*piapro*](## "netrc machine")
|
||||||
- **Picarto**
|
- **picarto**
|
||||||
- **PicartoVod**
|
- **picarto:vod**
|
||||||
- **Piksel**
|
- **Piksel**
|
||||||
- **Pinkbike**
|
- **Pinkbike**
|
||||||
- **Pinterest**
|
- **Pinterest**
|
||||||
- **PinterestCollection**
|
- **PinterestCollection**
|
||||||
- **PiramideTV**
|
- **PiramideTV**
|
||||||
- **PiramideTVChannel**
|
- **PiramideTVChannel**
|
||||||
- **pixiv:sketch**
|
|
||||||
- **pixiv:sketch:user**
|
|
||||||
- **Pladform**
|
|
||||||
- **PlanetMarathi**
|
- **PlanetMarathi**
|
||||||
- **Platzi**: [*platzi*](## "netrc machine")
|
- **Platzi**: [*platzi*](## "netrc machine")
|
||||||
- **PlatziCourse**: [*platzi*](## "netrc machine")
|
- **PlatziCourse**: [*platzi*](## "netrc machine")
|
||||||
- **player.sky.it**
|
- **player.sky.it**
|
||||||
|
- **PlayerFm**
|
||||||
- **playeur**
|
- **playeur**
|
||||||
- **PlayPlusTV**: [*playplustv*](## "netrc machine")
|
- **PlayPlusTV**: [*playplustv*](## "netrc machine")
|
||||||
- **PlaySuisse**: [*playsuisse*](## "netrc machine")
|
- **PlaySuisse**: [*playsuisse*](## "netrc machine")
|
||||||
@@ -1106,6 +1077,7 @@ # Supported sites
|
|||||||
- **pluralsight:course**
|
- **pluralsight:course**
|
||||||
- **PlutoTV**: (**Currently broken**)
|
- **PlutoTV**: (**Currently broken**)
|
||||||
- **PlVideo**: Платформа
|
- **PlVideo**: Платформа
|
||||||
|
- **PlyrEmbed**
|
||||||
- **PodbayFM**
|
- **PodbayFM**
|
||||||
- **PodbayFMChannel**
|
- **PodbayFMChannel**
|
||||||
- **Podchaser**
|
- **Podchaser**
|
||||||
@@ -1259,6 +1231,7 @@ # Supported sites
|
|||||||
- **rtve.es:alacarta**: RTVE a la carta and Play
|
- **rtve.es:alacarta**: RTVE a la carta and Play
|
||||||
- **rtve.es:audio**: RTVE audio
|
- **rtve.es:audio**: RTVE audio
|
||||||
- **rtve.es:live**: RTVE.es live streams
|
- **rtve.es:live**: RTVE.es live streams
|
||||||
|
- **rtve.es:program**: RTVE.es programs
|
||||||
- **rtve.es:television**
|
- **rtve.es:television**
|
||||||
- **rtvslo.si**
|
- **rtvslo.si**
|
||||||
- **rtvslo.si:show**
|
- **rtvslo.si:show**
|
||||||
@@ -1275,8 +1248,7 @@ # Supported sites
|
|||||||
- **rutube:person**: Rutube person videos
|
- **rutube:person**: Rutube person videos
|
||||||
- **rutube:playlist**: Rutube playlists
|
- **rutube:playlist**: Rutube playlists
|
||||||
- **rutube:tags**: Rutube tags
|
- **rutube:tags**: Rutube tags
|
||||||
- **RUTV**: RUTV.RU
|
- **Ruutu**: (**Currently broken**)
|
||||||
- **Ruutu**
|
|
||||||
- **Ruv**
|
- **Ruv**
|
||||||
- **ruv.is:spila**
|
- **ruv.is:spila**
|
||||||
- **S4C**
|
- **S4C**
|
||||||
@@ -1294,6 +1266,7 @@ # Supported sites
|
|||||||
- **SampleFocus**
|
- **SampleFocus**
|
||||||
- **Sangiin**: 参議院インターネット審議中継 (archive)
|
- **Sangiin**: 参議院インターネット審議中継 (archive)
|
||||||
- **Sapo**: SAPO Vídeos
|
- **Sapo**: SAPO Vídeos
|
||||||
|
- **SaucePlus**: Sauce+
|
||||||
- **SBS**: sbs.com.au
|
- **SBS**: sbs.com.au
|
||||||
- **sbs.co.kr**
|
- **sbs.co.kr**
|
||||||
- **sbs.co.kr:allvod_program**
|
- **sbs.co.kr:allvod_program**
|
||||||
@@ -1326,6 +1299,7 @@ # Supported sites
|
|||||||
- **SharePoint**
|
- **SharePoint**
|
||||||
- **ShareVideosEmbed**
|
- **ShareVideosEmbed**
|
||||||
- **ShemarooMe**
|
- **ShemarooMe**
|
||||||
|
- **Shiey**
|
||||||
- **ShowRoomLive**
|
- **ShowRoomLive**
|
||||||
- **ShugiinItvLive**: 衆議院インターネット審議中継
|
- **ShugiinItvLive**: 衆議院インターネット審議中継
|
||||||
- **ShugiinItvLiveRoom**: 衆議院インターネット審議中継 (中継)
|
- **ShugiinItvLiveRoom**: 衆議院インターネット審議中継 (中継)
|
||||||
@@ -1348,7 +1322,10 @@ # Supported sites
|
|||||||
- **Slideshare**
|
- **Slideshare**
|
||||||
- **SlidesLive**
|
- **SlidesLive**
|
||||||
- **Slutload**
|
- **Slutload**
|
||||||
- **Smotrim**
|
- **smotrim**
|
||||||
|
- **smotrim:audio**
|
||||||
|
- **smotrim:live**
|
||||||
|
- **smotrim:playlist**
|
||||||
- **SnapchatSpotlight**
|
- **SnapchatSpotlight**
|
||||||
- **Snotr**
|
- **Snotr**
|
||||||
- **SoftWhiteUnderbelly**: [*softwhiteunderbelly*](## "netrc machine")
|
- **SoftWhiteUnderbelly**: [*softwhiteunderbelly*](## "netrc machine")
|
||||||
@@ -1375,33 +1352,32 @@ # Supported sites
|
|||||||
- **southpark.cc.com:español**
|
- **southpark.cc.com:español**
|
||||||
- **southpark.de**
|
- **southpark.de**
|
||||||
- **southpark.lat**
|
- **southpark.lat**
|
||||||
- **southpark.nl**
|
- **southparkstudios.co.uk**
|
||||||
- **southparkstudios.dk**
|
- **southparkstudios.com.br**
|
||||||
|
- **southparkstudios.nu**
|
||||||
- **SovietsCloset**
|
- **SovietsCloset**
|
||||||
- **SovietsClosetPlaylist**
|
- **SovietsClosetPlaylist**
|
||||||
- **SpankBang**
|
- **SpankBang**
|
||||||
- **SpankBangPlaylist**
|
- **SpankBangPlaylist**
|
||||||
- **Spiegel**
|
- **Spiegel**
|
||||||
- **Sport5**
|
- **Sport5**
|
||||||
- **SportBox**
|
- **SportBox**: (**Currently broken**)
|
||||||
- **SportDeutschland**
|
- **SportDeutschland**
|
||||||
- **spotify**: Spotify episodes (**Currently broken**)
|
|
||||||
- **spotify:show**: Spotify shows (**Currently broken**)
|
|
||||||
- **Spreaker**
|
- **Spreaker**
|
||||||
- **SpreakerShow**
|
- **SpreakerShow**
|
||||||
- **SpringboardPlatform**
|
- **SpringboardPlatform**
|
||||||
- **Sprout**
|
|
||||||
- **SproutVideo**
|
- **SproutVideo**
|
||||||
- **sr:mediathek**: Saarländischer Rundfunk (**Currently broken**)
|
- **sr:mediathek**: Saarländischer Rundfunk
|
||||||
- **SRGSSR**
|
- **SRGSSR**
|
||||||
- **SRGSSRPlay**: srf.ch, rts.ch, rsi.ch, rtr.ch and swissinfo.ch play sites
|
- **SRGSSRPlay**: srf.ch, rts.ch, rsi.ch, rtr.ch and swissinfo.ch play sites
|
||||||
- **StacommuLive**: [*stacommu*](## "netrc machine")
|
- **StacommuLive**: [*stacommu*](## "netrc machine")
|
||||||
- **StacommuVOD**: [*stacommu*](## "netrc machine")
|
- **StacommuVOD**: [*stacommu*](## "netrc machine")
|
||||||
- **StagePlusVODConcert**: [*stageplus*](## "netrc machine")
|
- **StagePlusVODConcert**: [*stageplus*](## "netrc machine")
|
||||||
- **stanfordoc**: Stanford Open ClassRoom
|
- **stanfordoc**: Stanford Open ClassRoom
|
||||||
- **StarTrek**: (**Currently broken**)
|
- **startrek**: STAR TREK
|
||||||
- **startv**
|
- **startv**
|
||||||
- **Steam**
|
- **Steam**
|
||||||
|
- **SteamCommunity**
|
||||||
- **SteamCommunityBroadcast**
|
- **SteamCommunityBroadcast**
|
||||||
- **Stitcher**
|
- **Stitcher**
|
||||||
- **StitcherShow**
|
- **StitcherShow**
|
||||||
@@ -1422,12 +1398,11 @@ # Supported sites
|
|||||||
- **SunPorno**
|
- **SunPorno**
|
||||||
- **sverigesradio:episode**
|
- **sverigesradio:episode**
|
||||||
- **sverigesradio:publication**
|
- **sverigesradio:publication**
|
||||||
- **SVT**
|
- **svt:page**
|
||||||
- **SVTPage**
|
- **svt:play**: SVT Play and Öppet arkiv
|
||||||
- **SVTPlay**: SVT Play and Öppet arkiv
|
- **svt:play:series**
|
||||||
- **SVTSeries**
|
|
||||||
- **SwearnetEpisode**
|
- **SwearnetEpisode**
|
||||||
- **Syfy**: (**Currently broken**)
|
- **Syfy**
|
||||||
- **SYVDK**
|
- **SYVDK**
|
||||||
- **SztvHu**
|
- **SztvHu**
|
||||||
- **t-online.de**: (**Currently broken**)
|
- **t-online.de**: (**Currently broken**)
|
||||||
@@ -1471,14 +1446,13 @@ # Supported sites
|
|||||||
- **Telewebion**: (**Currently broken**)
|
- **Telewebion**: (**Currently broken**)
|
||||||
- **Tempo**
|
- **Tempo**
|
||||||
- **TennisTV**: [*tennistv*](## "netrc machine")
|
- **TennisTV**: [*tennistv*](## "netrc machine")
|
||||||
- **TenPlay**: [*10play*](## "netrc machine")
|
|
||||||
- **TenPlaySeason**
|
|
||||||
- **TF1**
|
- **TF1**
|
||||||
- **TFO**
|
- **TFO**: (**Currently broken**)
|
||||||
- **theatercomplextown:ppv**: [*theatercomplextown*](## "netrc machine")
|
- **theatercomplextown:ppv**: [*theatercomplextown*](## "netrc machine")
|
||||||
- **theatercomplextown:vod**: [*theatercomplextown*](## "netrc machine")
|
- **theatercomplextown:vod**: [*theatercomplextown*](## "netrc machine")
|
||||||
- **TheGuardianPodcast**
|
- **TheGuardianPodcast**
|
||||||
- **TheGuardianPodcastPlaylist**
|
- **TheGuardianPodcastPlaylist**
|
||||||
|
- **TheHighWire**
|
||||||
- **TheHoleTv**
|
- **TheHoleTv**
|
||||||
- **TheIntercept**
|
- **TheIntercept**
|
||||||
- **ThePlatform**
|
- **ThePlatform**
|
||||||
@@ -1510,6 +1484,7 @@ # Supported sites
|
|||||||
- **tokfm:podcast**
|
- **tokfm:podcast**
|
||||||
- **ToonGoggles**
|
- **ToonGoggles**
|
||||||
- **tou.tv**: [*toutv*](## "netrc machine")
|
- **tou.tv**: [*toutv*](## "netrc machine")
|
||||||
|
- **toutiao**: 今日头条
|
||||||
- **Toypics**: Toypics video (**Currently broken**)
|
- **Toypics**: Toypics video (**Currently broken**)
|
||||||
- **ToypicsUser**: Toypics user profile (**Currently broken**)
|
- **ToypicsUser**: Toypics user profile (**Currently broken**)
|
||||||
- **TrailerAddict**: (**Currently broken**)
|
- **TrailerAddict**: (**Currently broken**)
|
||||||
@@ -1526,16 +1501,17 @@ # Supported sites
|
|||||||
- **TrueID**
|
- **TrueID**
|
||||||
- **TruNews**
|
- **TruNews**
|
||||||
- **Truth**
|
- **Truth**
|
||||||
- **TruTV**
|
- **ttinglive**: 띵라이브 (formerly FlexTV)
|
||||||
- **Tube8**: (**Currently broken**)
|
- **Tube8**: (**Currently broken**)
|
||||||
- **TubeTuGraz**: [*tubetugraz*](## "netrc machine") tube.tugraz.at
|
- **TubeTuGraz**: [*tubetugraz*](## "netrc machine") tube.tugraz.at
|
||||||
- **TubeTuGrazSeries**: [*tubetugraz*](## "netrc machine")
|
- **TubeTuGrazSeries**: [*tubetugraz*](## "netrc machine")
|
||||||
- **tubitv**: [*tubitv*](## "netrc machine")
|
- **tubitv**: [*tubitv*](## "netrc machine")
|
||||||
- **tubitv:series**
|
- **tubitv:series**
|
||||||
- **Tumblr**: [*tumblr*](## "netrc machine")
|
- **Tumblr**: [*tumblr*](## "netrc machine")
|
||||||
- **TuneInPodcast**
|
- **tunein:embed**
|
||||||
- **TuneInPodcastEpisode**
|
- **tunein:podcast**
|
||||||
- **TuneInStation**
|
- **tunein:podcast:program**
|
||||||
|
- **tunein:station**
|
||||||
- **tv.dfb.de**
|
- **tv.dfb.de**
|
||||||
- **TV2**
|
- **TV2**
|
||||||
- **TV2Article**
|
- **TV2Article**
|
||||||
@@ -1545,8 +1521,8 @@ # Supported sites
|
|||||||
- **tv2playseries.hu**
|
- **tv2playseries.hu**
|
||||||
- **TV4**: tv4.se and tv4play.se
|
- **TV4**: tv4.se and tv4play.se
|
||||||
- **TV5MONDE**
|
- **TV5MONDE**
|
||||||
- **tv5unis**
|
- **tv5unis**: (**Currently broken**)
|
||||||
- **tv5unis:video**
|
- **tv5unis:video**: (**Currently broken**)
|
||||||
- **tv8.it**
|
- **tv8.it**
|
||||||
- **tv8.it:live**: TV8 Live
|
- **tv8.it:live**: TV8 Live
|
||||||
- **tv8.it:playlist**: TV8 Playlist
|
- **tv8.it:playlist**: TV8 Playlist
|
||||||
@@ -1558,7 +1534,6 @@ # Supported sites
|
|||||||
- **TVer**
|
- **TVer**
|
||||||
- **tvigle**: Интернет-телевидение Tvigle.ru
|
- **tvigle**: Интернет-телевидение Tvigle.ru
|
||||||
- **TVIPlayer**
|
- **TVIPlayer**
|
||||||
- **tvland.com**
|
|
||||||
- **TVN24**: (**Currently broken**)
|
- **TVN24**: (**Currently broken**)
|
||||||
- **TVNoe**: (**Currently broken**)
|
- **TVNoe**: (**Currently broken**)
|
||||||
- **tvopengr:embed**: tvopen.gr embedded videos
|
- **tvopengr:embed**: tvopen.gr embedded videos
|
||||||
@@ -1571,6 +1546,7 @@ # Supported sites
|
|||||||
- **TVPlayer**
|
- **TVPlayer**
|
||||||
- **TVPlayHome**
|
- **TVPlayHome**
|
||||||
- **tvw**
|
- **tvw**
|
||||||
|
- **tvw:news**
|
||||||
- **tvw:tvchannels**
|
- **tvw:tvchannels**
|
||||||
- **Tweakers**
|
- **Tweakers**
|
||||||
- **TwitCasting**
|
- **TwitCasting**
|
||||||
@@ -1599,8 +1575,9 @@ # Supported sites
|
|||||||
- **UKTVPlay**
|
- **UKTVPlay**
|
||||||
- **UlizaPlayer**
|
- **UlizaPlayer**
|
||||||
- **UlizaPortal**: ulizaportal.jp
|
- **UlizaPortal**: ulizaportal.jp
|
||||||
- **umg:de**: Universal Music Deutschland (**Currently broken**)
|
- **umg:de**: Universal Music Deutschland
|
||||||
- **Unistra**
|
- **Unistra**
|
||||||
|
- **UnitedNationsWebTv**
|
||||||
- **Unity**: (**Currently broken**)
|
- **Unity**: (**Currently broken**)
|
||||||
- **uol.com.br**
|
- **uol.com.br**
|
||||||
- **uplynk**
|
- **uplynk**
|
||||||
@@ -1616,16 +1593,15 @@ # Supported sites
|
|||||||
- **Varzesh3**: (**Currently broken**)
|
- **Varzesh3**: (**Currently broken**)
|
||||||
- **Vbox7**
|
- **Vbox7**
|
||||||
- **Veo**
|
- **Veo**
|
||||||
- **Vesti**: Вести.Ru (**Currently broken**)
|
|
||||||
- **Vevo**
|
- **Vevo**
|
||||||
- **VevoPlaylist**
|
- **VevoPlaylist**
|
||||||
- **VGTV**: VGTV, BTTV, FTV, Aftenposten and Aftonbladet
|
- **VGTV**: VGTV, BTTV, FTV, Aftenposten and Aftonbladet
|
||||||
- **vh1.com**
|
- **vh1.com**
|
||||||
- **vhx:embed**: [*vimeo*](## "netrc machine")
|
- **vhx:embed**: [*vimeo*](## "netrc machine")
|
||||||
- **vice**
|
- **vice**: (**Currently broken**)
|
||||||
- **vice:article**
|
- **vice:article**: (**Currently broken**)
|
||||||
- **vice:show**
|
- **vice:show**: (**Currently broken**)
|
||||||
- **Viddler**
|
- **Viddler**: (**Currently broken**)
|
||||||
- **Videa**
|
- **Videa**
|
||||||
- **video.arnes.si**: Arnes Video
|
- **video.arnes.si**: Arnes Video
|
||||||
- **video.google:search**: Google Video search; "gvsearch:" prefix
|
- **video.google:search**: Google Video search; "gvsearch:" prefix
|
||||||
@@ -1656,6 +1632,7 @@ # Supported sites
|
|||||||
- **vimeo**: [*vimeo*](## "netrc machine")
|
- **vimeo**: [*vimeo*](## "netrc machine")
|
||||||
- **vimeo:album**: [*vimeo*](## "netrc machine")
|
- **vimeo:album**: [*vimeo*](## "netrc machine")
|
||||||
- **vimeo:channel**: [*vimeo*](## "netrc machine")
|
- **vimeo:channel**: [*vimeo*](## "netrc machine")
|
||||||
|
- **vimeo:event**: [*vimeo*](## "netrc machine")
|
||||||
- **vimeo:group**: [*vimeo*](## "netrc machine")
|
- **vimeo:group**: [*vimeo*](## "netrc machine")
|
||||||
- **vimeo:likes**: [*vimeo*](## "netrc machine") Vimeo user likes
|
- **vimeo:likes**: [*vimeo*](## "netrc machine") Vimeo user likes
|
||||||
- **vimeo:ondemand**: [*vimeo*](## "netrc machine")
|
- **vimeo:ondemand**: [*vimeo*](## "netrc machine")
|
||||||
@@ -1696,7 +1673,7 @@ # Supported sites
|
|||||||
- **vrsquare:section**
|
- **vrsquare:section**
|
||||||
- **VRT**: VRT NWS, Flanders News, Flandern Info and Sporza
|
- **VRT**: VRT NWS, Flanders News, Flandern Info and Sporza
|
||||||
- **vrtmax**: [*vrtnu*](## "netrc machine") VRT MAX (formerly VRT NU)
|
- **vrtmax**: [*vrtnu*](## "netrc machine") VRT MAX (formerly VRT NU)
|
||||||
- **VTM**: (**Currently broken**)
|
- **VTM**
|
||||||
- **VTV**
|
- **VTV**
|
||||||
- **VTVGo**
|
- **VTVGo**
|
||||||
- **VTXTV**: [*vtxtv*](## "netrc machine")
|
- **VTXTV**: [*vtxtv*](## "netrc machine")
|
||||||
@@ -1763,7 +1740,6 @@ # Supported sites
|
|||||||
- **wykop:dig:comment**
|
- **wykop:dig:comment**
|
||||||
- **wykop:post**
|
- **wykop:post**
|
||||||
- **wykop:post:comment**
|
- **wykop:post:comment**
|
||||||
- **Xanimu**
|
|
||||||
- **XboxClips**
|
- **XboxClips**
|
||||||
- **XHamster**
|
- **XHamster**
|
||||||
- **XHamsterEmbed**
|
- **XHamsterEmbed**
|
||||||
|
|||||||
@@ -36,7 +36,6 @@
|
|||||||
"verbose": true,
|
"verbose": true,
|
||||||
"writedescription": false,
|
"writedescription": false,
|
||||||
"writeinfojson": true,
|
"writeinfojson": true,
|
||||||
"writeannotations": false,
|
|
||||||
"writelink": false,
|
"writelink": false,
|
||||||
"writeurllink": false,
|
"writeurllink": false,
|
||||||
"writewebloclink": false,
|
"writewebloclink": false,
|
||||||
|
|||||||
@@ -36,6 +36,18 @@ def do_GET(self):
|
|||||||
self.send_header('Content-Type', 'text/html; charset=utf-8')
|
self.send_header('Content-Type', 'text/html; charset=utf-8')
|
||||||
self.end_headers()
|
self.end_headers()
|
||||||
self.wfile.write(TEAPOT_RESPONSE_BODY.encode())
|
self.wfile.write(TEAPOT_RESPONSE_BODY.encode())
|
||||||
|
elif self.path == '/fake.m3u8':
|
||||||
|
self.send_response(200)
|
||||||
|
self.send_header('Content-Length', '1024')
|
||||||
|
self.end_headers()
|
||||||
|
self.wfile.write(1024 * b'\x00')
|
||||||
|
elif self.path == '/bipbop.m3u8':
|
||||||
|
with open('test/testdata/m3u8/bipbop_16x9.m3u8', 'rb') as f:
|
||||||
|
data = f.read()
|
||||||
|
self.send_response(200)
|
||||||
|
self.send_header('Content-Length', str(len(data)))
|
||||||
|
self.end_headers()
|
||||||
|
self.wfile.write(data)
|
||||||
else:
|
else:
|
||||||
assert False
|
assert False
|
||||||
|
|
||||||
@@ -314,6 +326,20 @@ def test_search_json_ld_realworld(self):
|
|||||||
},
|
},
|
||||||
{},
|
{},
|
||||||
),
|
),
|
||||||
|
(
|
||||||
|
# test thumbnail_url key without URL scheme
|
||||||
|
r'''
|
||||||
|
<script type="application/ld+json">
|
||||||
|
{
|
||||||
|
"@context": "https://schema.org",
|
||||||
|
"@type": "VideoObject",
|
||||||
|
"thumbnail_url": "//www.nobelprize.org/images/12693-landscape-medium-gallery.jpg"
|
||||||
|
}</script>''',
|
||||||
|
{
|
||||||
|
'thumbnails': [{'url': 'https://www.nobelprize.org/images/12693-landscape-medium-gallery.jpg'}],
|
||||||
|
},
|
||||||
|
{},
|
||||||
|
),
|
||||||
]
|
]
|
||||||
for html, expected_dict, search_json_ld_kwargs in _TESTS:
|
for html, expected_dict, search_json_ld_kwargs in _TESTS:
|
||||||
expect_dict(
|
expect_dict(
|
||||||
@@ -1919,7 +1945,7 @@ def test_response_with_expected_status_returns_content(self):
|
|||||||
server_thread.daemon = True
|
server_thread.daemon = True
|
||||||
server_thread.start()
|
server_thread.start()
|
||||||
|
|
||||||
(content, urlh) = self.ie._download_webpage_handle(
|
content, _ = self.ie._download_webpage_handle(
|
||||||
f'http://127.0.0.1:{port}/teapot', None,
|
f'http://127.0.0.1:{port}/teapot', None,
|
||||||
expected_status=TEAPOT_RESPONSE_STATUS)
|
expected_status=TEAPOT_RESPONSE_STATUS)
|
||||||
self.assertEqual(content, TEAPOT_RESPONSE_BODY)
|
self.assertEqual(content, TEAPOT_RESPONSE_BODY)
|
||||||
@@ -1933,6 +1959,208 @@ def test_search_nextjs_data(self):
|
|||||||
with self.assertWarns(DeprecationWarning):
|
with self.assertWarns(DeprecationWarning):
|
||||||
self.assertEqual(self.ie._search_nextjs_data('', None, default='{}'), {})
|
self.assertEqual(self.ie._search_nextjs_data('', None, default='{}'), {})
|
||||||
|
|
||||||
|
def test_search_nextjs_v13_data(self):
|
||||||
|
HTML = R'''
|
||||||
|
<script>(self.__next_f=self.__next_f||[]).push([0])</script>
|
||||||
|
<script>self.__next_f.push([2,"0:[\"$\",\"$L0\",null,{\"do_not_add_this\":\"fail\"}]\n"])</script>
|
||||||
|
<script>self.__next_f.push([1,"1:I[46975,[],\"HTTPAccessFallbackBoundary\"]\n2:I[32630,[\"8183\",\"static/chunks/8183-768193f6a9e33cdd.js\"]]\n"])</script>
|
||||||
|
<script nonce="abc123">self.__next_f.push([1,"e:[false,[\"$\",\"div\",null,{\"children\":[\"$\",\"$L18\",null,{\"foo\":\"bar\"}]}],false]\n "])</script>
|
||||||
|
<script>self.__next_f.push([1,"2a:[[\"$\",\"div\",null,{\"className\":\"flex flex-col\",\"children\":[]}],[\"$\",\"$L16\",null,{\"meta\":{\"dateCreated\":1730489700,\"uuid\":\"40cac41d-8d29-4ef5-aa11-75047b9f0907\"}}]]\n"])</script>
|
||||||
|
<script>self.__next_f.push([1,"df:[\"$undefined\",[\"$\",\"div\",null,{\"children\":[\"$\",\"$L17\",null,{}],\"do_not_include_this_field\":\"fail\"}],[\"$\",\"div\",null,{\"children\":[[\"$\",\"$L19\",null,{\"duplicated_field_name\":{\"x\":1}}],[\"$\",\"$L20\",null,{\"duplicated_field_name\":{\"y\":2}}]]}],\"$undefined\"]\n"])</script>
|
||||||
|
<script>self.__next_f.push([3,"MzM6WyIkIiwiJEwzMiIsbnVsbCx7ImRlY29kZWQiOiJzdWNjZXNzIn1d"])</script>
|
||||||
|
'''
|
||||||
|
EXPECTED = {
|
||||||
|
'18': {
|
||||||
|
'foo': 'bar',
|
||||||
|
},
|
||||||
|
'16': {
|
||||||
|
'meta': {
|
||||||
|
'dateCreated': 1730489700,
|
||||||
|
'uuid': '40cac41d-8d29-4ef5-aa11-75047b9f0907',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'19': {
|
||||||
|
'duplicated_field_name': {'x': 1},
|
||||||
|
},
|
||||||
|
'20': {
|
||||||
|
'duplicated_field_name': {'y': 2},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
self.assertEqual(self.ie._search_nextjs_v13_data(HTML, None), EXPECTED)
|
||||||
|
self.assertEqual(self.ie._search_nextjs_v13_data('', None, fatal=False), {})
|
||||||
|
self.assertEqual(self.ie._search_nextjs_v13_data(None, None, fatal=False), {})
|
||||||
|
|
||||||
|
def test_search_nuxt_json(self):
|
||||||
|
HTML_TMPL = '<script data-ssr="true" id="__NUXT_DATA__" type="application/json">[{}]</script>'
|
||||||
|
VALID_DATA = '''
|
||||||
|
["ShallowReactive",1],
|
||||||
|
{"data":2,"state":21,"once":25,"_errors":28,"_server_errors":30},
|
||||||
|
["ShallowReactive",3],
|
||||||
|
{"$abcdef123456":4},
|
||||||
|
{"podcast":5,"activeEpisodeData":7},
|
||||||
|
{"podcast":6,"seasons":14},
|
||||||
|
{"title":10,"id":11},
|
||||||
|
["Reactive",8],
|
||||||
|
{"episode":9,"creators":18,"empty_list":20},
|
||||||
|
{"title":12,"id":13,"refs":34,"empty_refs":35},
|
||||||
|
"Series Title",
|
||||||
|
"podcast-id-01",
|
||||||
|
"Episode Title",
|
||||||
|
"episode-id-99",
|
||||||
|
[15,16,17],
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3,
|
||||||
|
[19],
|
||||||
|
"Podcast Creator",
|
||||||
|
[],
|
||||||
|
{"$ssite-config":22},
|
||||||
|
{"env":23,"name":24,"map":26,"numbers":14},
|
||||||
|
"production",
|
||||||
|
"podcast-website",
|
||||||
|
["Set"],
|
||||||
|
["Reactive",27],
|
||||||
|
["Map"],
|
||||||
|
["ShallowReactive",29],
|
||||||
|
{},
|
||||||
|
["NuxtError",31],
|
||||||
|
{"status":32,"message":33},
|
||||||
|
503,
|
||||||
|
"Service Unavailable",
|
||||||
|
[36,37],
|
||||||
|
[38,39],
|
||||||
|
["Ref",40],
|
||||||
|
["ShallowRef",41],
|
||||||
|
["EmptyRef",42],
|
||||||
|
["EmptyShallowRef",43],
|
||||||
|
"ref",
|
||||||
|
"shallow_ref",
|
||||||
|
"{\\"ref\\":1}",
|
||||||
|
"{\\"shallow_ref\\":2}"
|
||||||
|
'''
|
||||||
|
PAYLOAD = {
|
||||||
|
'data': {
|
||||||
|
'$abcdef123456': {
|
||||||
|
'podcast': {
|
||||||
|
'podcast': {
|
||||||
|
'title': 'Series Title',
|
||||||
|
'id': 'podcast-id-01',
|
||||||
|
},
|
||||||
|
'seasons': [1, 2, 3],
|
||||||
|
},
|
||||||
|
'activeEpisodeData': {
|
||||||
|
'episode': {
|
||||||
|
'title': 'Episode Title',
|
||||||
|
'id': 'episode-id-99',
|
||||||
|
'refs': ['ref', 'shallow_ref'],
|
||||||
|
'empty_refs': [{'ref': 1}, {'shallow_ref': 2}],
|
||||||
|
},
|
||||||
|
'creators': ['Podcast Creator'],
|
||||||
|
'empty_list': [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'state': {
|
||||||
|
'$ssite-config': {
|
||||||
|
'env': 'production',
|
||||||
|
'name': 'podcast-website',
|
||||||
|
'map': [],
|
||||||
|
'numbers': [1, 2, 3],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'once': [],
|
||||||
|
'_errors': {},
|
||||||
|
'_server_errors': {
|
||||||
|
'status': 503,
|
||||||
|
'message': 'Service Unavailable',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
PARTIALLY_INVALID = [(
|
||||||
|
'''
|
||||||
|
{"data":1},
|
||||||
|
{"invalid_raw_list":2},
|
||||||
|
[15,16,17]
|
||||||
|
''',
|
||||||
|
{'data': {'invalid_raw_list': [None, None, None]}},
|
||||||
|
), (
|
||||||
|
'''
|
||||||
|
{"data":1},
|
||||||
|
["EmptyRef",2],
|
||||||
|
"not valid JSON"
|
||||||
|
''',
|
||||||
|
{'data': None},
|
||||||
|
), (
|
||||||
|
'''
|
||||||
|
{"data":1},
|
||||||
|
["EmptyShallowRef",2],
|
||||||
|
"not valid JSON"
|
||||||
|
''',
|
||||||
|
{'data': None},
|
||||||
|
)]
|
||||||
|
INVALID = [
|
||||||
|
'''
|
||||||
|
[]
|
||||||
|
''',
|
||||||
|
'''
|
||||||
|
["unsupported",1],
|
||||||
|
{"data":2},
|
||||||
|
{}
|
||||||
|
''',
|
||||||
|
]
|
||||||
|
DEFAULT = object()
|
||||||
|
|
||||||
|
self.assertEqual(self.ie._search_nuxt_json(HTML_TMPL.format(VALID_DATA), None), PAYLOAD)
|
||||||
|
self.assertEqual(self.ie._search_nuxt_json('', None, fatal=False), {})
|
||||||
|
self.assertIs(self.ie._search_nuxt_json('', None, default=DEFAULT), DEFAULT)
|
||||||
|
|
||||||
|
for data, expected in PARTIALLY_INVALID:
|
||||||
|
self.assertEqual(
|
||||||
|
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, fatal=False), expected)
|
||||||
|
|
||||||
|
for data in INVALID:
|
||||||
|
self.assertIs(
|
||||||
|
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, default=DEFAULT), DEFAULT)
|
||||||
|
|
||||||
|
|
||||||
|
class TestInfoExtractorNetwork(unittest.TestCase):
|
||||||
|
def setUp(self, /):
|
||||||
|
self.httpd = http.server.HTTPServer(
|
||||||
|
('127.0.0.1', 0), InfoExtractorTestRequestHandler)
|
||||||
|
self.port = http_server_port(self.httpd)
|
||||||
|
|
||||||
|
self.server_thread = threading.Thread(target=self.httpd.serve_forever)
|
||||||
|
self.server_thread.daemon = True
|
||||||
|
self.server_thread.start()
|
||||||
|
|
||||||
|
self.called = False
|
||||||
|
|
||||||
|
def require_warning(*args, **kwargs):
|
||||||
|
self.called = True
|
||||||
|
|
||||||
|
self.ydl = FakeYDL()
|
||||||
|
self.ydl.report_warning = require_warning
|
||||||
|
self.ie = DummyIE(self.ydl)
|
||||||
|
|
||||||
|
def tearDown(self, /):
|
||||||
|
self.ydl.close()
|
||||||
|
self.httpd.shutdown()
|
||||||
|
self.httpd.server_close()
|
||||||
|
self.server_thread.join(1)
|
||||||
|
|
||||||
|
def test_extract_m3u8_formats(self):
|
||||||
|
formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
|
||||||
|
f'http://127.0.0.1:{self.port}/bipbop.m3u8', None, fatal=False)
|
||||||
|
self.assertFalse(self.called)
|
||||||
|
self.assertTrue(formats)
|
||||||
|
self.assertTrue(subtitles)
|
||||||
|
|
||||||
|
def test_extract_m3u8_formats_warning(self):
|
||||||
|
formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
|
||||||
|
f'http://127.0.0.1:{self.port}/fake.m3u8', None, fatal=False)
|
||||||
|
self.assertTrue(self.called, 'Warning was not issued for binary m3u8 file')
|
||||||
|
self.assertFalse(formats)
|
||||||
|
self.assertFalse(subtitles)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
@@ -1435,6 +1435,27 @@ def test_load_plugins_compat(self):
|
|||||||
FakeYDL().close()
|
FakeYDL().close()
|
||||||
assert all_plugins_loaded.value
|
assert all_plugins_loaded.value
|
||||||
|
|
||||||
|
def test_close_hooks(self):
|
||||||
|
# Should call all registered close hooks on close
|
||||||
|
close_hook_called = False
|
||||||
|
close_hook_two_called = False
|
||||||
|
|
||||||
|
def close_hook():
|
||||||
|
nonlocal close_hook_called
|
||||||
|
close_hook_called = True
|
||||||
|
|
||||||
|
def close_hook_two():
|
||||||
|
nonlocal close_hook_two_called
|
||||||
|
close_hook_two_called = True
|
||||||
|
|
||||||
|
ydl = FakeYDL()
|
||||||
|
ydl.add_close_hook(close_hook)
|
||||||
|
ydl.add_close_hook(close_hook_two)
|
||||||
|
|
||||||
|
ydl.close()
|
||||||
|
self.assertTrue(close_hook_called, 'Close hook was not called')
|
||||||
|
self.assertTrue(close_hook_two_called, 'Close hook two was not called')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
# Allow direct execution
|
# Allow direct execution
|
||||||
|
import datetime as dt
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import unittest
|
import unittest
|
||||||
@@ -12,7 +13,7 @@
|
|||||||
|
|
||||||
from yt_dlp import compat
|
from yt_dlp import compat
|
||||||
from yt_dlp.compat import urllib # isort: split
|
from yt_dlp.compat import urllib # isort: split
|
||||||
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser
|
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
|
||||||
from yt_dlp.compat.urllib.request import getproxies
|
from yt_dlp.compat.urllib.request import getproxies
|
||||||
|
|
||||||
|
|
||||||
@@ -21,9 +22,6 @@ def test_compat_passthrough(self):
|
|||||||
with self.assertWarns(DeprecationWarning):
|
with self.assertWarns(DeprecationWarning):
|
||||||
_ = compat.compat_basestring
|
_ = compat.compat_basestring
|
||||||
|
|
||||||
with self.assertWarns(DeprecationWarning):
|
|
||||||
_ = compat.WINDOWS_VT_MODE
|
|
||||||
|
|
||||||
self.assertEqual(urllib.request.getproxies, getproxies)
|
self.assertEqual(urllib.request.getproxies, getproxies)
|
||||||
|
|
||||||
with self.assertWarns(DeprecationWarning):
|
with self.assertWarns(DeprecationWarning):
|
||||||
@@ -62,6 +60,45 @@ def test_compat_etree_fromstring_doctype(self):
|
|||||||
def test_struct_unpack(self):
|
def test_struct_unpack(self):
|
||||||
self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
|
self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
|
||||||
|
|
||||||
|
def test_compat_datetime_from_timestamp(self):
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(0),
|
||||||
|
dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(1),
|
||||||
|
dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(3600),
|
||||||
|
dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(-1),
|
||||||
|
dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(-86400),
|
||||||
|
dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(0.5),
|
||||||
|
dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(1.000001),
|
||||||
|
dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(-1.25),
|
||||||
|
dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(-1577923200),
|
||||||
|
dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(4102444800),
|
||||||
|
dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
compat_datetime_from_timestamp(173568960000),
|
||||||
|
dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
@@ -58,6 +58,14 @@ def test_get_desktop_environment(self):
|
|||||||
({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
|
({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
|
||||||
({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
|
({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
|
||||||
|
|
||||||
|
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME),
|
||||||
|
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME),
|
||||||
|
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
|
||||||
|
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
|
||||||
|
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
|
||||||
|
|
||||||
|
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'my_custom_de', 'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
|
||||||
|
|
||||||
({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
|
({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
|
||||||
({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
|
({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
|
||||||
({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
|
({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
|
||||||
|
|||||||
235
test/test_devalue.py
Normal file
235
test/test_devalue.py
Normal file
@@ -0,0 +1,235 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
import math
|
||||||
|
import re
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from yt_dlp.utils.jslib import devalue
|
||||||
|
|
||||||
|
|
||||||
|
TEST_CASES_EQUALS = [{
|
||||||
|
'name': 'int',
|
||||||
|
'unparsed': [-42],
|
||||||
|
'parsed': -42,
|
||||||
|
}, {
|
||||||
|
'name': 'str',
|
||||||
|
'unparsed': ['woo!!!'],
|
||||||
|
'parsed': 'woo!!!',
|
||||||
|
}, {
|
||||||
|
'name': 'Number',
|
||||||
|
'unparsed': [['Object', 42]],
|
||||||
|
'parsed': 42,
|
||||||
|
}, {
|
||||||
|
'name': 'String',
|
||||||
|
'unparsed': [['Object', 'yar']],
|
||||||
|
'parsed': 'yar',
|
||||||
|
}, {
|
||||||
|
'name': 'Infinity',
|
||||||
|
'unparsed': -4,
|
||||||
|
'parsed': math.inf,
|
||||||
|
}, {
|
||||||
|
'name': 'negative Infinity',
|
||||||
|
'unparsed': -5,
|
||||||
|
'parsed': -math.inf,
|
||||||
|
}, {
|
||||||
|
'name': 'negative zero',
|
||||||
|
'unparsed': -6,
|
||||||
|
'parsed': -0.0,
|
||||||
|
}, {
|
||||||
|
'name': 'RegExp',
|
||||||
|
'unparsed': [['RegExp', 'regexp', 'gim']], # XXX: flags are ignored
|
||||||
|
'parsed': re.compile('regexp'),
|
||||||
|
}, {
|
||||||
|
'name': 'Date',
|
||||||
|
'unparsed': [['Date', '2001-09-09T01:46:40.000Z']],
|
||||||
|
'parsed': dt.datetime.fromtimestamp(1e9, tz=dt.timezone.utc),
|
||||||
|
}, {
|
||||||
|
'name': 'Array',
|
||||||
|
'unparsed': [[1, 2, 3], 'a', 'b', 'c'],
|
||||||
|
'parsed': ['a', 'b', 'c'],
|
||||||
|
}, {
|
||||||
|
'name': 'Array (empty)',
|
||||||
|
'unparsed': [[]],
|
||||||
|
'parsed': [],
|
||||||
|
}, {
|
||||||
|
'name': 'Array (sparse)',
|
||||||
|
'unparsed': [[-2, 1, -2], 'b'],
|
||||||
|
'parsed': [None, 'b', None],
|
||||||
|
}, {
|
||||||
|
'name': 'Object',
|
||||||
|
'unparsed': [{'foo': 1, 'x-y': 2}, 'bar', 'z'],
|
||||||
|
'parsed': {'foo': 'bar', 'x-y': 'z'},
|
||||||
|
}, {
|
||||||
|
'name': 'Set',
|
||||||
|
'unparsed': [['Set', 1, 2, 3], 1, 2, 3],
|
||||||
|
'parsed': [1, 2, 3],
|
||||||
|
}, {
|
||||||
|
'name': 'Map',
|
||||||
|
'unparsed': [['Map', 1, 2], 'a', 'b'],
|
||||||
|
'parsed': [['a', 'b']],
|
||||||
|
}, {
|
||||||
|
'name': 'BigInt',
|
||||||
|
'unparsed': [['BigInt', '1']],
|
||||||
|
'parsed': 1,
|
||||||
|
}, {
|
||||||
|
'name': 'Uint8Array',
|
||||||
|
'unparsed': [['Uint8Array', 'AQID']],
|
||||||
|
'parsed': [1, 2, 3],
|
||||||
|
}, {
|
||||||
|
'name': 'ArrayBuffer',
|
||||||
|
'unparsed': [['ArrayBuffer', 'AQID']],
|
||||||
|
'parsed': [1, 2, 3],
|
||||||
|
}, {
|
||||||
|
'name': 'str (repetition)',
|
||||||
|
'unparsed': [[1, 1], 'a string'],
|
||||||
|
'parsed': ['a string', 'a string'],
|
||||||
|
}, {
|
||||||
|
'name': 'None (repetition)',
|
||||||
|
'unparsed': [[1, 1], None],
|
||||||
|
'parsed': [None, None],
|
||||||
|
}, {
|
||||||
|
'name': 'dict (repetition)',
|
||||||
|
'unparsed': [[1, 1], {}],
|
||||||
|
'parsed': [{}, {}],
|
||||||
|
}, {
|
||||||
|
'name': 'Object without prototype',
|
||||||
|
'unparsed': [['null']],
|
||||||
|
'parsed': {},
|
||||||
|
}, {
|
||||||
|
'name': 'cross-realm POJO',
|
||||||
|
'unparsed': [{}],
|
||||||
|
'parsed': {},
|
||||||
|
}]
|
||||||
|
|
||||||
|
TEST_CASES_IS = [{
|
||||||
|
'name': 'bool',
|
||||||
|
'unparsed': [True],
|
||||||
|
'parsed': True,
|
||||||
|
}, {
|
||||||
|
'name': 'Boolean',
|
||||||
|
'unparsed': [['Object', False]],
|
||||||
|
'parsed': False,
|
||||||
|
}, {
|
||||||
|
'name': 'undefined',
|
||||||
|
'unparsed': -1,
|
||||||
|
'parsed': None,
|
||||||
|
}, {
|
||||||
|
'name': 'null',
|
||||||
|
'unparsed': [None],
|
||||||
|
'parsed': None,
|
||||||
|
}, {
|
||||||
|
'name': 'NaN',
|
||||||
|
'unparsed': -3,
|
||||||
|
'parsed': math.nan,
|
||||||
|
}]
|
||||||
|
|
||||||
|
TEST_CASES_INVALID = [{
|
||||||
|
'name': 'empty string',
|
||||||
|
'unparsed': '',
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'hole',
|
||||||
|
'unparsed': -2,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'invalid integer input',
|
||||||
|
}, {
|
||||||
|
'name': 'string',
|
||||||
|
'unparsed': 'hello',
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'number',
|
||||||
|
'unparsed': 42,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'invalid integer input',
|
||||||
|
}, {
|
||||||
|
'name': 'boolean',
|
||||||
|
'unparsed': True,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'null',
|
||||||
|
'unparsed': None,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'object',
|
||||||
|
'unparsed': {},
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'empty array',
|
||||||
|
'unparsed': [],
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected a non-empty list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'Python negative indexing',
|
||||||
|
'unparsed': [[1, 2, 3, 4, 5, 6, 7, -7], 1, 2, 3, 4, 5, 6, 7],
|
||||||
|
'error': IndexError,
|
||||||
|
'pattern': r'invalid index: -7',
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
class TestDevalue(unittest.TestCase):
|
||||||
|
def test_devalue_parse_equals(self):
|
||||||
|
for tc in TEST_CASES_EQUALS:
|
||||||
|
self.assertEqual(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
|
||||||
|
|
||||||
|
def test_devalue_parse_is(self):
|
||||||
|
for tc in TEST_CASES_IS:
|
||||||
|
self.assertIs(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
|
||||||
|
|
||||||
|
def test_devalue_parse_invalid(self):
|
||||||
|
for tc in TEST_CASES_INVALID:
|
||||||
|
with self.assertRaisesRegex(tc['error'], tc['pattern'], msg=tc['name']):
|
||||||
|
devalue.parse(tc['unparsed'])
|
||||||
|
|
||||||
|
def test_devalue_parse_cyclical(self):
|
||||||
|
name = 'Map (cyclical)'
|
||||||
|
result = devalue.parse([['Map', 1, 0], 'self'])
|
||||||
|
self.assertEqual(result[0][0], 'self', name)
|
||||||
|
self.assertIs(result, result[0][1], name)
|
||||||
|
|
||||||
|
name = 'Set (cyclical)'
|
||||||
|
result = devalue.parse([['Set', 0, 1], 42])
|
||||||
|
self.assertEqual(result[1], 42, name)
|
||||||
|
self.assertIs(result, result[0], name)
|
||||||
|
|
||||||
|
result = devalue.parse([[0]])
|
||||||
|
self.assertIs(result, result[0], 'Array (cyclical)')
|
||||||
|
|
||||||
|
name = 'Object (cyclical)'
|
||||||
|
result = devalue.parse([{'self': 0}])
|
||||||
|
self.assertIs(result, result['self'], name)
|
||||||
|
|
||||||
|
name = 'Object with null prototype (cyclical)'
|
||||||
|
result = devalue.parse([['null', 'self', 0]])
|
||||||
|
self.assertIs(result, result['self'], name)
|
||||||
|
|
||||||
|
name = 'Objects (cyclical)'
|
||||||
|
result = devalue.parse([[1, 2], {'second': 2}, {'first': 1}])
|
||||||
|
self.assertIs(result[0], result[1]['first'], name)
|
||||||
|
self.assertIs(result[1], result[0]['second'], name)
|
||||||
|
|
||||||
|
def test_devalue_parse_revivers(self):
|
||||||
|
self.assertEqual(
|
||||||
|
devalue.parse([['indirect', 1], {'a': 2}, 'b'], revivers={'indirect': lambda x: x}),
|
||||||
|
{'a': 'b'}, 'revivers (indirect)')
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
devalue.parse([['parse', 1], '{"a":0}'], revivers={'parse': lambda x: json.loads(x)}),
|
||||||
|
{'a': 0}, 'revivers (parse)')
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
@@ -14,6 +14,7 @@
|
|||||||
|
|
||||||
from test.helper import (
|
from test.helper import (
|
||||||
assertGreaterEqual,
|
assertGreaterEqual,
|
||||||
|
assertLessEqual,
|
||||||
expect_info_dict,
|
expect_info_dict,
|
||||||
expect_warnings,
|
expect_warnings,
|
||||||
get_params,
|
get_params,
|
||||||
@@ -65,10 +66,6 @@ def _file_md5(fn):
|
|||||||
|
|
||||||
@is_download_test
|
@is_download_test
|
||||||
class TestDownload(unittest.TestCase):
|
class TestDownload(unittest.TestCase):
|
||||||
# Parallel testing in nosetests. See
|
|
||||||
# http://nose.readthedocs.org/en/latest/doc_tests/test_multiprocess/multiprocess.html
|
|
||||||
_multiprocess_shared_ = True
|
|
||||||
|
|
||||||
maxDiff = None
|
maxDiff = None
|
||||||
|
|
||||||
COMPLETED_TESTS = {}
|
COMPLETED_TESTS = {}
|
||||||
@@ -121,10 +118,13 @@ def print_skipping(reason):
|
|||||||
params = get_params(test_case.get('params', {}))
|
params = get_params(test_case.get('params', {}))
|
||||||
params['outtmpl'] = tname + '_' + params['outtmpl']
|
params['outtmpl'] = tname + '_' + params['outtmpl']
|
||||||
if is_playlist and 'playlist' not in test_case:
|
if is_playlist and 'playlist' not in test_case:
|
||||||
params.setdefault('extract_flat', 'in_playlist')
|
params.setdefault('playlistend', max(
|
||||||
params.setdefault('playlistend', test_case.get(
|
test_case.get('playlist_mincount', -1),
|
||||||
'playlist_mincount', test_case.get('playlist_count', -2) + 1))
|
test_case.get('playlist_count', -2) + 1,
|
||||||
|
test_case.get('playlist_maxcount', -2) + 1))
|
||||||
params.setdefault('skip_download', True)
|
params.setdefault('skip_download', True)
|
||||||
|
if 'playlist_duration_sum' not in test_case:
|
||||||
|
params.setdefault('extract_flat', 'in_playlist')
|
||||||
|
|
||||||
ydl = YoutubeDL(params, auto_init=False)
|
ydl = YoutubeDL(params, auto_init=False)
|
||||||
ydl.add_default_info_extractors()
|
ydl.add_default_info_extractors()
|
||||||
@@ -159,6 +159,7 @@ def try_rm_tcs_files(tcs=None):
|
|||||||
try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
|
try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
|
||||||
try_rm_tcs_files()
|
try_rm_tcs_files()
|
||||||
try:
|
try:
|
||||||
|
test_url = test_case['url']
|
||||||
try_num = 1
|
try_num = 1
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
@@ -166,7 +167,7 @@ def try_rm_tcs_files(tcs=None):
|
|||||||
# for outside error handling, and returns the exit code
|
# for outside error handling, and returns the exit code
|
||||||
# instead of the result dict.
|
# instead of the result dict.
|
||||||
res_dict = ydl.extract_info(
|
res_dict = ydl.extract_info(
|
||||||
test_case['url'],
|
test_url,
|
||||||
force_generic_extractor=params.get('force_generic_extractor', False))
|
force_generic_extractor=params.get('force_generic_extractor', False))
|
||||||
except (DownloadError, ExtractorError) as err:
|
except (DownloadError, ExtractorError) as err:
|
||||||
# Check if the exception is not a network related one
|
# Check if the exception is not a network related one
|
||||||
@@ -194,23 +195,23 @@ def try_rm_tcs_files(tcs=None):
|
|||||||
self.assertTrue('entries' in res_dict)
|
self.assertTrue('entries' in res_dict)
|
||||||
expect_info_dict(self, res_dict, test_case.get('info_dict', {}))
|
expect_info_dict(self, res_dict, test_case.get('info_dict', {}))
|
||||||
|
|
||||||
|
num_entries = len(res_dict.get('entries', []))
|
||||||
if 'playlist_mincount' in test_case:
|
if 'playlist_mincount' in test_case:
|
||||||
|
mincount = test_case['playlist_mincount']
|
||||||
assertGreaterEqual(
|
assertGreaterEqual(
|
||||||
self,
|
self, num_entries, mincount,
|
||||||
len(res_dict['entries']),
|
f'Expected at least {mincount} entries in playlist {test_url}, but got only {num_entries}')
|
||||||
test_case['playlist_mincount'],
|
|
||||||
'Expected at least %d in playlist %s, but got only %d' % (
|
|
||||||
test_case['playlist_mincount'], test_case['url'],
|
|
||||||
len(res_dict['entries'])))
|
|
||||||
if 'playlist_count' in test_case:
|
if 'playlist_count' in test_case:
|
||||||
|
count = test_case['playlist_count']
|
||||||
|
got = num_entries if num_entries <= count else 'more'
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
len(res_dict['entries']),
|
num_entries, count,
|
||||||
test_case['playlist_count'],
|
f'Expected exactly {count} entries in playlist {test_url}, but got {got}')
|
||||||
'Expected %d entries in playlist %s, but got %d.' % (
|
if 'playlist_maxcount' in test_case:
|
||||||
test_case['playlist_count'],
|
maxcount = test_case['playlist_maxcount']
|
||||||
test_case['url'],
|
assertLessEqual(
|
||||||
len(res_dict['entries']),
|
self, num_entries, maxcount,
|
||||||
))
|
f'Expected at most {maxcount} entries in playlist {test_url}, but got more')
|
||||||
if 'playlist_duration_sum' in test_case:
|
if 'playlist_duration_sum' in test_case:
|
||||||
got_duration = sum(e['duration'] for e in res_dict['entries'])
|
got_duration = sum(e['duration'] for e in res_dict['entries'])
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
|
|||||||
@@ -478,6 +478,69 @@ def test_extract_function_with_global_stack(self):
|
|||||||
func = jsi.extract_function('c', {'e': 10}, {'f': 100, 'g': 1000})
|
func = jsi.extract_function('c', {'e': 10}, {'f': 100, 'g': 1000})
|
||||||
self.assertEqual(func([1]), 1111)
|
self.assertEqual(func([1]), 1111)
|
||||||
|
|
||||||
|
def test_extract_object(self):
|
||||||
|
jsi = JSInterpreter('var a={};a.xy={};var xy;var zxy={};xy={z:function(){return "abc"}};')
|
||||||
|
self.assertTrue('z' in jsi.extract_object('xy', None))
|
||||||
|
|
||||||
|
def test_increment_decrement(self):
|
||||||
|
self._test('function f() { var x = 1; return ++x; }', 2)
|
||||||
|
self._test('function f() { var x = 1; return x++; }', 1)
|
||||||
|
self._test('function f() { var x = 1; x--; return x }', 0)
|
||||||
|
self._test('function f() { var y; var x = 1; x++, --x, x--, x--, y="z", "abc", x++; return --x }', -1)
|
||||||
|
self._test('function f() { var a = "test--"; return a; }', 'test--')
|
||||||
|
self._test('function f() { var b = 1; var a = "b--"; return a; }', 'b--')
|
||||||
|
|
||||||
|
def test_nested_function_scoping(self):
|
||||||
|
self._test(R'''
|
||||||
|
function f() {
|
||||||
|
var g = function() {
|
||||||
|
var P = 2;
|
||||||
|
return P;
|
||||||
|
};
|
||||||
|
var P = 1;
|
||||||
|
g();
|
||||||
|
return P;
|
||||||
|
}
|
||||||
|
''', 1)
|
||||||
|
self._test(R'''
|
||||||
|
function f() {
|
||||||
|
var x = function() {
|
||||||
|
for (var w = 1, M = []; w < 2; w++) switch (w) {
|
||||||
|
case 1:
|
||||||
|
M.push("a");
|
||||||
|
case 2:
|
||||||
|
M.push("b");
|
||||||
|
}
|
||||||
|
return M
|
||||||
|
};
|
||||||
|
var w = "c";
|
||||||
|
var M = "d";
|
||||||
|
var y = x();
|
||||||
|
y.push(w);
|
||||||
|
y.push(M);
|
||||||
|
return y;
|
||||||
|
}
|
||||||
|
''', ['a', 'b', 'c', 'd'])
|
||||||
|
self._test(R'''
|
||||||
|
function f() {
|
||||||
|
var P, Q;
|
||||||
|
var z = 100;
|
||||||
|
var g = function() {
|
||||||
|
var P, Q; P = 2; Q = 15;
|
||||||
|
z = 0;
|
||||||
|
return P+Q;
|
||||||
|
};
|
||||||
|
P = 1; Q = 10;
|
||||||
|
var x = g(), y = 3;
|
||||||
|
return P+Q+x+y+z;
|
||||||
|
}
|
||||||
|
''', 31)
|
||||||
|
|
||||||
|
def test_undefined_varnames(self):
|
||||||
|
jsi = JSInterpreter('function f(){ var a; return [a, b]; }')
|
||||||
|
self._test(jsi, [JS_Undefined, JS_Undefined])
|
||||||
|
self.assertEqual(jsi._undefined_varnames, {'b'})
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
@@ -22,7 +22,6 @@
|
|||||||
import tempfile
|
import tempfile
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
import urllib.error
|
|
||||||
import urllib.request
|
import urllib.request
|
||||||
import warnings
|
import warnings
|
||||||
import zlib
|
import zlib
|
||||||
@@ -223,10 +222,7 @@ def do_GET(self):
|
|||||||
if encoding == 'br' and brotli:
|
if encoding == 'br' and brotli:
|
||||||
payload = brotli.compress(payload)
|
payload = brotli.compress(payload)
|
||||||
elif encoding == 'gzip':
|
elif encoding == 'gzip':
|
||||||
buf = io.BytesIO()
|
payload = gzip.compress(payload, mtime=0)
|
||||||
with gzip.GzipFile(fileobj=buf, mode='wb') as f:
|
|
||||||
f.write(payload)
|
|
||||||
payload = buf.getvalue()
|
|
||||||
elif encoding == 'deflate':
|
elif encoding == 'deflate':
|
||||||
payload = zlib.compress(payload)
|
payload = zlib.compress(payload)
|
||||||
elif encoding == 'unsupported':
|
elif encoding == 'unsupported':
|
||||||
@@ -729,6 +725,17 @@ def test_keep_header_casing(self, handler):
|
|||||||
|
|
||||||
assert 'X-test-heaDer: test' in res
|
assert 'X-test-heaDer: test' in res
|
||||||
|
|
||||||
|
def test_partial_read_then_full_read(self, handler):
|
||||||
|
with handler() as rh:
|
||||||
|
for encoding in ('', 'gzip', 'deflate'):
|
||||||
|
res = validate_and_send(rh, Request(
|
||||||
|
f'http://127.0.0.1:{self.http_port}/content-encoding',
|
||||||
|
headers={'ytdl-encoding': encoding}))
|
||||||
|
assert res.headers.get('Content-Encoding') == encoding
|
||||||
|
assert res.read(6) == b'<html>'
|
||||||
|
assert res.read(0) == b''
|
||||||
|
assert res.read() == b'<video src="/vid.mp4" /></html>'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
|
@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
|
||||||
class TestClientCertificate:
|
class TestClientCertificate:
|
||||||
|
|||||||
@@ -20,7 +20,6 @@
|
|||||||
add_accept_encoding_header,
|
add_accept_encoding_header,
|
||||||
get_redirect_method,
|
get_redirect_method,
|
||||||
make_socks_proxy_opts,
|
make_socks_proxy_opts,
|
||||||
select_proxy,
|
|
||||||
ssl_load_certs,
|
ssl_load_certs,
|
||||||
)
|
)
|
||||||
from yt_dlp.networking.exceptions import (
|
from yt_dlp.networking.exceptions import (
|
||||||
@@ -28,7 +27,7 @@
|
|||||||
IncompleteRead,
|
IncompleteRead,
|
||||||
)
|
)
|
||||||
from yt_dlp.socks import ProxyType
|
from yt_dlp.socks import ProxyType
|
||||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
from yt_dlp.utils.networking import HTTPHeaderDict, select_proxy
|
||||||
|
|
||||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ def test_default_overwrites(self):
|
|||||||
'-o', 'test.webm',
|
'-o', 'test.webm',
|
||||||
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
||||||
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
sout, serr = outp.communicate()
|
sout, _ = outp.communicate()
|
||||||
self.assertTrue(b'has already been downloaded' in sout)
|
self.assertTrue(b'has already been downloaded' in sout)
|
||||||
# if the file has no content, it has not been redownloaded
|
# if the file has no content, it has not been redownloaded
|
||||||
self.assertTrue(os.path.getsize(download_file) < 1)
|
self.assertTrue(os.path.getsize(download_file) < 1)
|
||||||
@@ -41,7 +41,7 @@ def test_yes_overwrites(self):
|
|||||||
'-o', 'test.webm',
|
'-o', 'test.webm',
|
||||||
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
||||||
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
sout, serr = outp.communicate()
|
sout, _ = outp.communicate()
|
||||||
self.assertTrue(b'has already been downloaded' not in sout)
|
self.assertTrue(b'has already been downloaded' not in sout)
|
||||||
# if the file has no content, it has not been redownloaded
|
# if the file has no content, it has not been redownloaded
|
||||||
self.assertTrue(os.path.getsize(download_file) > 1)
|
self.assertTrue(os.path.getsize(download_file) > 1)
|
||||||
|
|||||||
@@ -8,6 +8,8 @@
|
|||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
|
||||||
from yt_dlp import YoutubeDL
|
from yt_dlp import YoutubeDL
|
||||||
from yt_dlp.utils import shell_quote
|
from yt_dlp.utils import shell_quote
|
||||||
from yt_dlp.postprocessor import (
|
from yt_dlp.postprocessor import (
|
||||||
@@ -47,7 +49,18 @@ def test_escaping(self):
|
|||||||
print('Skipping: ffmpeg not found')
|
print('Skipping: ffmpeg not found')
|
||||||
return
|
return
|
||||||
|
|
||||||
file = 'test/testdata/thumbnails/foo %d bar/foo_%d.{}'
|
test_data_dir = 'test/testdata/thumbnails'
|
||||||
|
generated_file = f'{test_data_dir}/empty.webp'
|
||||||
|
|
||||||
|
subprocess.check_call([
|
||||||
|
pp.executable, '-y', '-f', 'lavfi', '-i', 'color=c=black:s=320x320',
|
||||||
|
'-c:v', 'libwebp', '-pix_fmt', 'yuv420p', '-vframes', '1', generated_file,
|
||||||
|
], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
|
||||||
|
|
||||||
|
file = test_data_dir + '/foo %d bar/foo_%d.{}'
|
||||||
|
initial_file = file.format('webp')
|
||||||
|
os.replace(generated_file, initial_file)
|
||||||
|
|
||||||
tests = (('webp', 'png'), ('png', 'jpg'))
|
tests = (('webp', 'png'), ('png', 'jpg'))
|
||||||
|
|
||||||
for inp, out in tests:
|
for inp, out in tests:
|
||||||
@@ -55,11 +68,13 @@ def test_escaping(self):
|
|||||||
if os.path.exists(out_file):
|
if os.path.exists(out_file):
|
||||||
os.remove(out_file)
|
os.remove(out_file)
|
||||||
pp.convert_thumbnail(file.format(inp), out)
|
pp.convert_thumbnail(file.format(inp), out)
|
||||||
assert os.path.exists(out_file)
|
self.assertTrue(os.path.exists(out_file))
|
||||||
|
|
||||||
for _, out in tests:
|
for _, out in tests:
|
||||||
os.remove(file.format(out))
|
os.remove(file.format(out))
|
||||||
|
|
||||||
|
os.remove(initial_file)
|
||||||
|
|
||||||
|
|
||||||
class TestExec(unittest.TestCase):
|
class TestExec(unittest.TestCase):
|
||||||
def test_parse_cmd(self):
|
def test_parse_cmd(self):
|
||||||
@@ -610,3 +625,7 @@ def test_quote_for_concat_QuotesAtEnd(self):
|
|||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
r"'special '\'' characters '\'' galore'\'\'\'",
|
r"'special '\'' characters '\'' galore'\'\'\'",
|
||||||
self._pp._quote_for_ffmpeg("special ' characters ' galore'''"))
|
self._pp._quote_for_ffmpeg("special ' characters ' galore'''"))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
|
|||||||
71
test/test_pot/conftest.py
Normal file
71
test/test_pot/conftest.py
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import collections
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from yt_dlp import YoutubeDL
|
||||||
|
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||||
|
from yt_dlp.extractor.common import InfoExtractor
|
||||||
|
from yt_dlp.extractor.youtube.pot._provider import IEContentProviderLogger
|
||||||
|
from yt_dlp.extractor.youtube.pot.provider import PoTokenRequest, PoTokenContext
|
||||||
|
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||||
|
|
||||||
|
|
||||||
|
class MockLogger(IEContentProviderLogger):
|
||||||
|
|
||||||
|
log_level = IEContentProviderLogger.LogLevel.TRACE
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self.messages = collections.defaultdict(list)
|
||||||
|
|
||||||
|
def trace(self, message: str):
|
||||||
|
self.messages['trace'].append(message)
|
||||||
|
|
||||||
|
def debug(self, message: str):
|
||||||
|
self.messages['debug'].append(message)
|
||||||
|
|
||||||
|
def info(self, message: str):
|
||||||
|
self.messages['info'].append(message)
|
||||||
|
|
||||||
|
def warning(self, message: str, *, once=False):
|
||||||
|
self.messages['warning'].append(message)
|
||||||
|
|
||||||
|
def error(self, message: str):
|
||||||
|
self.messages['error'].append(message)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def ie() -> InfoExtractor:
|
||||||
|
ydl = YoutubeDL()
|
||||||
|
return ydl.get_info_extractor('Youtube')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def logger() -> MockLogger:
|
||||||
|
return MockLogger()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
|
||||||
|
def pot_request() -> PoTokenRequest:
|
||||||
|
return PoTokenRequest(
|
||||||
|
context=PoTokenContext.GVS,
|
||||||
|
innertube_context={'client': {'clientName': 'WEB'}},
|
||||||
|
innertube_host='youtube.com',
|
||||||
|
session_index=None,
|
||||||
|
player_url=None,
|
||||||
|
is_authenticated=False,
|
||||||
|
video_webpage=None,
|
||||||
|
|
||||||
|
visitor_data='example-visitor-data',
|
||||||
|
data_sync_id='example-data-sync-id',
|
||||||
|
video_id='example-video-id',
|
||||||
|
|
||||||
|
request_cookiejar=YoutubeDLCookieJar(),
|
||||||
|
request_proxy=None,
|
||||||
|
request_headers=HTTPHeaderDict(),
|
||||||
|
request_timeout=None,
|
||||||
|
request_source_address=None,
|
||||||
|
request_verify_tls=True,
|
||||||
|
|
||||||
|
bypass_cache=False,
|
||||||
|
)
|
||||||
117
test/test_pot/test_pot_builtin_memorycache.py
Normal file
117
test/test_pot/test_pot_builtin_memorycache.py
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from collections import OrderedDict
|
||||||
|
import pytest
|
||||||
|
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
|
||||||
|
from yt_dlp.utils import bug_reports_message
|
||||||
|
from yt_dlp.extractor.youtube.pot._builtin.memory_cache import MemoryLRUPCP, memorylru_preference, initialize_global_cache
|
||||||
|
from yt_dlp.version import __version__
|
||||||
|
from yt_dlp.extractor.youtube.pot._registry import _pot_cache_providers, _pot_memory_cache
|
||||||
|
|
||||||
|
|
||||||
|
class TestMemoryLRUPCS:
|
||||||
|
|
||||||
|
def test_base_type(self):
|
||||||
|
assert issubclass(MemoryLRUPCP, IEContentProvider)
|
||||||
|
assert issubclass(MemoryLRUPCP, BuiltinIEContentProvider)
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def pcp(self, ie, logger) -> MemoryLRUPCP:
|
||||||
|
return MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), max_size))
|
||||||
|
|
||||||
|
def test_is_registered(self):
|
||||||
|
assert _pot_cache_providers.value.get('MemoryLRU') == MemoryLRUPCP
|
||||||
|
|
||||||
|
def test_initialization(self, pcp):
|
||||||
|
assert pcp.PROVIDER_NAME == 'memory'
|
||||||
|
assert pcp.PROVIDER_VERSION == __version__
|
||||||
|
assert pcp.BUG_REPORT_MESSAGE == bug_reports_message(before='')
|
||||||
|
assert pcp.is_available()
|
||||||
|
|
||||||
|
def test_store_and_get(self, pcp):
|
||||||
|
pcp.store('key1', 'value1', int(time.time()) + 60)
|
||||||
|
assert pcp.get('key1') == 'value1'
|
||||||
|
assert len(pcp.cache) == 1
|
||||||
|
|
||||||
|
def test_store_ignore_expired(self, pcp):
|
||||||
|
pcp.store('key1', 'value1', int(time.time()) - 1)
|
||||||
|
assert len(pcp.cache) == 0
|
||||||
|
assert pcp.get('key1') is None
|
||||||
|
assert len(pcp.cache) == 0
|
||||||
|
|
||||||
|
def test_store_override_existing_key(self, ie, logger):
|
||||||
|
MAX_SIZE = 2
|
||||||
|
pcp = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
|
||||||
|
pcp.store('key1', 'value1', int(time.time()) + 60)
|
||||||
|
pcp.store('key2', 'value2', int(time.time()) + 60)
|
||||||
|
assert len(pcp.cache) == 2
|
||||||
|
pcp.store('key1', 'value2', int(time.time()) + 60)
|
||||||
|
# Ensure that the override key gets added to the end of the cache instead of in the same position
|
||||||
|
pcp.store('key3', 'value3', int(time.time()) + 60)
|
||||||
|
assert pcp.get('key1') == 'value2'
|
||||||
|
|
||||||
|
def test_store_ignore_expired_existing_key(self, pcp):
|
||||||
|
pcp.store('key1', 'value2', int(time.time()) + 60)
|
||||||
|
pcp.store('key1', 'value1', int(time.time()) - 1)
|
||||||
|
assert len(pcp.cache) == 1
|
||||||
|
assert pcp.get('key1') == 'value2'
|
||||||
|
assert len(pcp.cache) == 1
|
||||||
|
|
||||||
|
def test_get_key_expired(self, pcp):
|
||||||
|
pcp.store('key1', 'value1', int(time.time()) + 60)
|
||||||
|
assert pcp.get('key1') == 'value1'
|
||||||
|
assert len(pcp.cache) == 1
|
||||||
|
pcp.cache['key1'] = ('value1', int(time.time()) - 1)
|
||||||
|
assert pcp.get('key1') is None
|
||||||
|
assert len(pcp.cache) == 0
|
||||||
|
|
||||||
|
def test_lru_eviction(self, ie, logger):
|
||||||
|
MAX_SIZE = 2
|
||||||
|
provider = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
|
||||||
|
provider.store('key1', 'value1', int(time.time()) + 5)
|
||||||
|
provider.store('key2', 'value2', int(time.time()) + 5)
|
||||||
|
assert len(provider.cache) == 2
|
||||||
|
|
||||||
|
assert provider.get('key1') == 'value1'
|
||||||
|
|
||||||
|
provider.store('key3', 'value3', int(time.time()) + 5)
|
||||||
|
assert len(provider.cache) == 2
|
||||||
|
|
||||||
|
assert provider.get('key2') is None
|
||||||
|
|
||||||
|
provider.store('key4', 'value4', int(time.time()) + 5)
|
||||||
|
assert len(provider.cache) == 2
|
||||||
|
|
||||||
|
assert provider.get('key1') is None
|
||||||
|
assert provider.get('key3') == 'value3'
|
||||||
|
assert provider.get('key4') == 'value4'
|
||||||
|
|
||||||
|
def test_delete(self, pcp):
|
||||||
|
pcp.store('key1', 'value1', int(time.time()) + 5)
|
||||||
|
assert len(pcp.cache) == 1
|
||||||
|
assert pcp.get('key1') == 'value1'
|
||||||
|
pcp.delete('key1')
|
||||||
|
assert len(pcp.cache) == 0
|
||||||
|
assert pcp.get('key1') is None
|
||||||
|
|
||||||
|
def test_use_global_cache_default(self, ie, logger):
|
||||||
|
pcp = MemoryLRUPCP(ie, logger, {})
|
||||||
|
assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
|
||||||
|
assert pcp.cache is _pot_memory_cache.value['cache']
|
||||||
|
assert pcp.lock is _pot_memory_cache.value['lock']
|
||||||
|
|
||||||
|
pcp2 = MemoryLRUPCP(ie, logger, {})
|
||||||
|
assert pcp.max_size == pcp2.max_size == _pot_memory_cache.value['max_size'] == 25
|
||||||
|
assert pcp.cache is pcp2.cache is _pot_memory_cache.value['cache']
|
||||||
|
assert pcp.lock is pcp2.lock is _pot_memory_cache.value['lock']
|
||||||
|
|
||||||
|
def test_fail_max_size_change_global(self, ie, logger):
|
||||||
|
pcp = MemoryLRUPCP(ie, logger, {})
|
||||||
|
assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
|
||||||
|
with pytest.raises(ValueError, match='Cannot change max_size of initialized global memory cache'):
|
||||||
|
initialize_global_cache(50)
|
||||||
|
|
||||||
|
assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
|
||||||
|
|
||||||
|
def test_memory_lru_preference(self, pcp, ie, pot_request):
|
||||||
|
assert memorylru_preference(pcp, pot_request) == 10000
|
||||||
47
test/test_pot/test_pot_builtin_utils.py
Normal file
47
test/test_pot/test_pot_builtin_utils.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
import pytest
|
||||||
|
from yt_dlp.extractor.youtube.pot.provider import (
|
||||||
|
PoTokenContext,
|
||||||
|
|
||||||
|
)
|
||||||
|
|
||||||
|
from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding, ContentBindingType
|
||||||
|
|
||||||
|
|
||||||
|
class TestGetWebPoContentBinding:
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('client_name, context, is_authenticated, expected', [
|
||||||
|
*[(client, context, is_authenticated, expected) for client in [
|
||||||
|
'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
|
||||||
|
for context, is_authenticated, expected in [
|
||||||
|
(PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
|
||||||
|
(PoTokenContext.PLAYER, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
|
||||||
|
(PoTokenContext.SUBS, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
|
||||||
|
(PoTokenContext.GVS, True, ('example-data-sync-id', ContentBindingType.DATASYNC_ID)),
|
||||||
|
]],
|
||||||
|
('WEB_REMIX', PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
|
||||||
|
('WEB_REMIX', PoTokenContext.PLAYER, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
|
||||||
|
('ANDROID', PoTokenContext.GVS, False, (None, None)),
|
||||||
|
('IOS', PoTokenContext.GVS, False, (None, None)),
|
||||||
|
])
|
||||||
|
def test_get_webpo_content_binding(self, pot_request, client_name, context, is_authenticated, expected):
|
||||||
|
pot_request.innertube_context['client']['clientName'] = client_name
|
||||||
|
pot_request.context = context
|
||||||
|
pot_request.is_authenticated = is_authenticated
|
||||||
|
assert get_webpo_content_binding(pot_request) == expected
|
||||||
|
|
||||||
|
def test_extract_visitor_id(self, pot_request):
|
||||||
|
pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'
|
||||||
|
assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == ('123abcXYZ_-', ContentBindingType.VISITOR_ID)
|
||||||
|
|
||||||
|
def test_invalid_visitor_id(self, pot_request):
|
||||||
|
# visitor id not alphanumeric (i.e. protobuf extraction failed)
|
||||||
|
pot_request.visitor_data = 'CggxMjM0NTY3OCiA4s-qBg%3D%3D'
|
||||||
|
assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)
|
||||||
|
|
||||||
|
def test_no_visitor_id(self, pot_request):
|
||||||
|
pot_request.visitor_data = 'KIDiz6oG'
|
||||||
|
assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)
|
||||||
|
|
||||||
|
def test_invalid_base64(self, pot_request):
|
||||||
|
pot_request.visitor_data = 'invalid-base64'
|
||||||
|
assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)
|
||||||
92
test/test_pot/test_pot_builtin_webpospec.py
Normal file
92
test/test_pot/test_pot_builtin_webpospec.py
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
|
||||||
|
from yt_dlp.extractor.youtube.pot.cache import CacheProviderWritePolicy
|
||||||
|
from yt_dlp.utils import bug_reports_message
|
||||||
|
from yt_dlp.extractor.youtube.pot.provider import (
|
||||||
|
PoTokenRequest,
|
||||||
|
PoTokenContext,
|
||||||
|
|
||||||
|
)
|
||||||
|
from yt_dlp.version import __version__
|
||||||
|
|
||||||
|
from yt_dlp.extractor.youtube.pot._builtin.webpo_cachespec import WebPoPCSP
|
||||||
|
from yt_dlp.extractor.youtube.pot._registry import _pot_pcs_providers
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
|
||||||
|
def pot_request(pot_request) -> PoTokenRequest:
|
||||||
|
pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D' # visitor_id=123abcXYZ_-
|
||||||
|
return pot_request
|
||||||
|
|
||||||
|
|
||||||
|
class TestWebPoPCSP:
|
||||||
|
def test_base_type(self):
|
||||||
|
assert issubclass(WebPoPCSP, IEContentProvider)
|
||||||
|
assert issubclass(WebPoPCSP, BuiltinIEContentProvider)
|
||||||
|
|
||||||
|
def test_init(self, ie, logger):
|
||||||
|
pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
|
||||||
|
assert pcs.PROVIDER_NAME == 'webpo'
|
||||||
|
assert pcs.PROVIDER_VERSION == __version__
|
||||||
|
assert pcs.BUG_REPORT_MESSAGE == bug_reports_message(before='')
|
||||||
|
assert pcs.is_available()
|
||||||
|
|
||||||
|
def test_is_registered(self):
|
||||||
|
assert _pot_pcs_providers.value.get('WebPo') == WebPoPCSP
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('client_name, context, is_authenticated', [
|
||||||
|
('ANDROID', PoTokenContext.GVS, False),
|
||||||
|
('IOS', PoTokenContext.GVS, False),
|
||||||
|
('IOS', PoTokenContext.PLAYER, False),
|
||||||
|
])
|
||||||
|
def test_not_supports(self, ie, logger, pot_request, client_name, context, is_authenticated):
|
||||||
|
pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
|
||||||
|
pot_request.innertube_context['client']['clientName'] = client_name
|
||||||
|
pot_request.context = context
|
||||||
|
pot_request.is_authenticated = is_authenticated
|
||||||
|
assert pcs.generate_cache_spec(pot_request) is None
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected', [
    # Every WebPO-capable client should produce the same key bindings for a
    # given (context, auth state, network settings) combination.
    *[(client, context, is_authenticated, remote_host, source_address, request_proxy, expected) for client in [
        'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
        for context, is_authenticated, remote_host, source_address, request_proxy, expected in [
        (PoTokenContext.GVS, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
        (PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'video_id'}),
        (PoTokenContext.GVS, True, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': 'example-data-sync-id', 'cbt': 'datasync_id'}),
    ]],
    ('WEB_REMIX', PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
    ('WEB', PoTokenContext.GVS, False, None, None, None, {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id', 'ip': None, 'sa': None, 'px': None}),
    ('TVHTML5', PoTokenContext.PLAYER, False, None, None, 'http://example.com', {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'video_id', 'ip': None, 'sa': None, 'px': 'http://example.com'}),
])
def test_generate_key_bindings(self, ie, logger, pot_request, client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected):
    """Cache key bindings reflect client name, PO token context, auth state and network settings."""
    pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
    pot_request.innertube_context['client']['clientName'] = client_name
    pot_request.context = context
    pot_request.is_authenticated = is_authenticated
    pot_request.innertube_context['client']['remoteHost'] = remote_host
    pot_request.request_source_address = source_address
    pot_request.request_proxy = request_proxy
    pot_request.video_id = '123abcXYZ_-'  # same as visitor id to test type

    assert pcs.generate_cache_spec(pot_request).key_bindings == expected
|
||||||
|
|
||||||
|
def test_no_bind_visitor_id(self, ie, logger, pot_request):
    """With bind_to_visitor_id disabled, keys bind to raw visitor_data instead of the visitor id."""
    # Should not bind to visitor id if setting is set to False
    pcs = WebPoPCSP(ie=ie, logger=logger, settings={'bind_to_visitor_id': ['false']})
    pot_request.innertube_context['client']['clientName'] = 'WEB'
    pot_request.context = PoTokenContext.GVS
    pot_request.is_authenticated = False
    assert pcs.generate_cache_spec(pot_request).key_bindings == {'t': 'webpo', 'ip': None, 'sa': None, 'px': None, 'cb': 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D', 'cbt': 'visitor_data'}
|
||||||
|
|
||||||
|
def test_default_ttl(self, ie, logger, pot_request):
    """Generated cache specs default to a 6-hour TTL."""
    pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
    assert pcs.generate_cache_spec(pot_request).default_ttl == 6 * 60 * 60  # should default to 6 hours
|
||||||
|
|
||||||
|
def test_write_policy(self, ie, logger, pot_request):
    """GVS specs write to all cache providers; PLAYER specs write to the first only."""
    pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
    pot_request.context = PoTokenContext.GVS
    assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL
    pot_request.context = PoTokenContext.PLAYER
    assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST
|
||||||
1529
test/test_pot/test_pot_director.py
Normal file
1529
test/test_pot/test_pot_director.py
Normal file
File diff suppressed because it is too large
Load Diff
629
test/test_pot/test_pot_framework.py
Normal file
629
test/test_pot/test_pot_framework.py
Normal file
@@ -0,0 +1,629 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
|
||||||
|
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||||
|
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||||
|
from yt_dlp.extractor.youtube.pot.provider import (
|
||||||
|
PoTokenRequest,
|
||||||
|
PoTokenContext,
|
||||||
|
ExternalRequestFeature,
|
||||||
|
|
||||||
|
)
|
||||||
|
|
||||||
|
from yt_dlp.extractor.youtube.pot.cache import (
|
||||||
|
PoTokenCacheProvider,
|
||||||
|
PoTokenCacheSpec,
|
||||||
|
PoTokenCacheSpecProvider,
|
||||||
|
CacheProviderWritePolicy,
|
||||||
|
)
|
||||||
|
|
||||||
|
import yt_dlp.extractor.youtube.pot.cache as cache
|
||||||
|
|
||||||
|
from yt_dlp.networking import Request
|
||||||
|
from yt_dlp.extractor.youtube.pot.provider import (
|
||||||
|
PoTokenResponse,
|
||||||
|
PoTokenProvider,
|
||||||
|
PoTokenProviderRejectedRequest,
|
||||||
|
provider_bug_report_message,
|
||||||
|
register_provider,
|
||||||
|
register_preference,
|
||||||
|
)
|
||||||
|
|
||||||
|
from yt_dlp.extractor.youtube.pot._registry import _pot_providers, _ptp_preferences, _pot_pcs_providers, _pot_cache_providers, _pot_cache_provider_preferences
|
||||||
|
|
||||||
|
|
||||||
|
class ExamplePTP(PoTokenProvider):
    """Minimal working PO token provider used as a fixture by the tests below."""
    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    _SUPPORTED_CLIENTS = ('WEB',)
    _SUPPORTED_CONTEXTS = (PoTokenContext.GVS, )

    _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
        ExternalRequestFeature.PROXY_SCHEME_HTTP,
        ExternalRequestFeature.PROXY_SCHEME_SOCKS5H,
    )

    def is_available(self) -> bool:
        return True

    def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
        # Fixed token/expiry so tests can assert exact values
        return PoTokenResponse('example-token', expires_at=123)
|
||||||
|
|
||||||
|
|
||||||
|
class ExampleCacheProviderPCP(PoTokenCacheProvider):
    """Minimal working PO token cache provider used as a fixture by the tests below."""

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def is_available(self) -> bool:
        return True

    def get(self, key: str):
        return 'example-cache'

    def store(self, key: str, value: str, expires_at: int):
        pass

    def delete(self, key: str):
        pass
|
||||||
|
|
||||||
|
|
||||||
|
class ExampleCacheSpecProviderPCSP(PoTokenCacheSpecProvider):
    """Minimal working cache-spec provider returning a fixed spec for assertions."""

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def generate_cache_spec(self, request: PoTokenRequest):
        return PoTokenCacheSpec(
            key_bindings={'field': 'example-key'},
            default_ttl=60,
            write_policy=CacheProviderWritePolicy.WRITE_FIRST,
        )
|
||||||
|
|
||||||
|
|
||||||
|
class TestPoTokenProvider:
    """Behavioural tests for the PoTokenProvider base class and its request validation."""

    def test_base_type(self):
        assert issubclass(PoTokenProvider, IEContentProvider)

    def test_create_provider_missing_fetch_method(self, ie, logger):
        # Abstract _real_request_pot must be implemented
        class MissingMethodsPTP(PoTokenProvider):
            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPTP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_available_method(self, ie, logger):
        # Abstract is_available must be implemented
        class MissingMethodsPTP(PoTokenProvider):
            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

        with pytest.raises(TypeError):
            MissingMethodsPTP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        # Defaults are derived from the class name when metadata is not declared
        class BarebonesProviderPTP(PoTokenProvider):
            def is_available(self) -> bool:
                return True

            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

        provider = BarebonesProviderPTP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_example_provider_success(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'Example'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()

        response = provider.request_pot(pot_request)

        assert response.po_token == 'example-token'
        assert response.expires_at == 123

    def test_provider_unsupported_context(self, ie, logger, pot_request):
        # ExamplePTP only supports the GVS context
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.context = PoTokenContext.PLAYER

        with pytest.raises(PoTokenProviderRejectedRequest):
            provider.request_pot(pot_request)

    def test_provider_unsupported_client(self, ie, logger, pot_request):
        # ExamplePTP only supports the WEB client
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = 'ANDROID'

        with pytest.raises(PoTokenProviderRejectedRequest):
            provider.request_pot(pot_request)

    def test_provider_unsupported_proxy_scheme(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.request_proxy = 'socks4://example.com'

        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match=r'External requests by "example" provider do not support proxy scheme "socks4"\. Supported proxy '
                  'schemes: http, socks5h',
        ):
            provider.request_pot(pot_request)

        pot_request.request_proxy = 'http://example.com'

        assert provider.request_pot(pot_request)

    def test_provider_ignore_external_request_features(self, ie, logger, pot_request):
        # None (as opposed to an empty tuple) disables external-request feature checks entirely
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = None

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_proxy = 'socks5://example.com'
        assert provider.request_pot(pot_request)
        pot_request.request_source_address = '0.0.0.0'
        assert provider.request_pot(pot_request)

    def test_provider_unsupported_external_request_source_address(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_source_address = None
        assert provider.request_pot(pot_request)

        pot_request.request_source_address = '0.0.0.0'
        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support setting source address',
        ):
            provider.request_pot(pot_request)

    def test_provider_supported_external_request_source_address(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
                ExternalRequestFeature.SOURCE_ADDRESS,
            )

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_source_address = None
        assert provider.request_pot(pot_request)

        pot_request.request_source_address = '0.0.0.0'
        assert provider.request_pot(pot_request)

    def test_provider_unsupported_external_request_tls_verification(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_verify_tls = True
        assert provider.request_pot(pot_request)

        pot_request.request_verify_tls = False
        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support ignoring TLS certificate failures',
        ):
            provider.request_pot(pot_request)

    def test_provider_supported_external_request_tls_verification(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
                ExternalRequestFeature.DISABLE_TLS_VERIFICATION,
            )

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_verify_tls = True
        assert provider.request_pot(pot_request)

        pot_request.request_verify_tls = False
        assert provider.request_pot(pot_request)

    def test_provider_request_webpage(self, ie, logger, pot_request):
        # _request_webpage should apply headers/proxy/cookiejar from the pot request
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        cookiejar = YoutubeDLCookieJar()
        pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
        pot_request.request_proxy = 'socks5://example-proxy.com'
        pot_request.request_cookiejar = cookiejar

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), pot_request=pot_request)

        assert sent_request.url == 'https://example.com'
        assert sent_request.headers['User-Agent'] == 'example-user-agent'
        assert sent_request.proxies == {'all': 'socks5://example-proxy.com'}
        assert sent_request.extensions['cookiejar'] is cookiejar
        assert 'Requesting webpage' in logger.messages['info']

    def test_provider_request_webpage_override(self, ie, logger, pot_request):
        # Values set explicitly on the Request take precedence over the pot request
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        cookiejar_request = YoutubeDLCookieJar()
        pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
        pot_request.request_proxy = 'socks5://example-proxy.com'
        pot_request.request_cookiejar = cookiejar_request

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
            headers={'User-Agent': 'override-user-agent-override'},
            proxies={'http': 'http://example-proxy-override.com'},
            extensions={'cookiejar': YoutubeDLCookieJar()},
        ), pot_request=pot_request, note='Custom requesting webpage')

        assert sent_request.url == 'https://example.com'
        assert sent_request.headers['User-Agent'] == 'override-user-agent-override'
        assert sent_request.proxies == {'http': 'http://example-proxy-override.com'}
        assert sent_request.extensions['cookiejar'] is not cookiejar_request
        assert 'Custom requesting webpage' in logger.messages['info']

    def test_provider_request_webpage_no_log(self, ie, logger, pot_request):
        # note=False suppresses the info log entirely
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), note=False)

        assert sent_request.url == 'https://example.com'
        assert 'info' not in logger.messages

    def test_provider_request_webpage_no_pot_request(self, ie, logger):
        # pot_request is optional; the raw Request is sent unchanged
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), pot_request=None)

        assert sent_request.url == 'https://example.com'

    def test_get_config_arg(self, ie, logger):
        # _configuration_arg lowercases keys and values unless casesense=True
        provider = ExamplePTP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})

        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        # PROVIDER_KEY derivation requires the class name to end with the PTP suffix
        class InvalidSuffix(PoTokenProvider):
            PROVIDER_NAME = 'invalid-suffix'

            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018
|
||||||
|
|
||||||
|
|
||||||
|
class TestPoTokenCacheProvider:
    """Behavioural tests for the PoTokenCacheProvider base class."""

    def test_base_type(self):
        assert issubclass(PoTokenCacheProvider, IEContentProvider)

    def test_create_provider_missing_get_method(self, ie, logger):
        # All of get/store/delete/is_available are abstract; omitting any should fail
        class MissingMethodsPCP(PoTokenCacheProvider):
            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_store_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_delete_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_is_available_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        # Defaults are derived from the class name when metadata is not declared
        class BarebonesProviderPCP(PoTokenCacheProvider):

            def is_available(self) -> bool:
                return True

            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        provider = BarebonesProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_create_provider_example(self, ie, logger):
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()

    def test_get_config_arg(self, ie, logger):
        # _configuration_arg lowercases keys and values unless casesense=True
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        # PROVIDER_KEY derivation requires the class name to end with the PCP suffix
        class InvalidSuffix(PoTokenCacheProvider):
            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018
|
||||||
|
|
||||||
|
|
||||||
|
class TestPoTokenCacheSpecProvider:
    """Behavioural tests for the PoTokenCacheSpecProvider base class."""

    def test_base_type(self):
        assert issubclass(PoTokenCacheSpecProvider, IEContentProvider)

    def test_create_provider_missing_supports_method(self, ie, logger):
        # generate_cache_spec is abstract
        class MissingMethodsPCS(PoTokenCacheSpecProvider):
            pass

        with pytest.raises(TypeError):
            MissingMethodsPCS(ie=ie, logger=logger, settings={})

    def test_create_provider_barebones(self, ie, pot_request, logger):
        # Defaults: name/key from class name, version 0.0.0, WRITE_ALL policy
        class BarebonesProviderPCSP(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return PoTokenCacheSpec(
                    default_ttl=100,
                    key_bindings={},
                )

        provider = BarebonesProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
        assert provider.is_available()
        assert provider.generate_cache_spec(request=pot_request).default_ttl == 100
        assert provider.generate_cache_spec(request=pot_request).key_bindings == {}
        assert provider.generate_cache_spec(request=pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL

    def test_create_provider_example(self, ie, pot_request, logger):
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheSpecProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()
        assert provider.generate_cache_spec(pot_request)
        assert provider.generate_cache_spec(pot_request).key_bindings == {'field': 'example-key'}
        assert provider.generate_cache_spec(pot_request).default_ttl == 60
        assert provider.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST

    def test_get_config_arg(self, ie, logger):
        # _configuration_arg lowercases keys and values unless casesense=True
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})

        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        # PROVIDER_KEY derivation requires the class name to end with the PCSP suffix
        class InvalidSuffix(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return None

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018
|
||||||
|
|
||||||
|
|
||||||
|
class TestPoTokenRequest:
    """Tests for PoTokenRequest value semantics."""

    def test_copy_request(self, pot_request):
        """copy() must deep-copy mutable fields (innertube_context, headers) but share the cookiejar."""
        copied_request = pot_request.copy()

        assert copied_request is not pot_request
        assert copied_request.context == pot_request.context
        assert copied_request.innertube_context == pot_request.innertube_context
        assert copied_request.innertube_context is not pot_request.innertube_context
        # Mutating the copy must not leak into the original
        copied_request.innertube_context['client']['clientName'] = 'ANDROID'
        assert pot_request.innertube_context['client']['clientName'] != 'ANDROID'
        assert copied_request.innertube_host == pot_request.innertube_host
        assert copied_request.session_index == pot_request.session_index
        assert copied_request.player_url == pot_request.player_url
        assert copied_request.is_authenticated == pot_request.is_authenticated
        assert copied_request.visitor_data == pot_request.visitor_data
        assert copied_request.data_sync_id == pot_request.data_sync_id
        assert copied_request.video_id == pot_request.video_id
        assert copied_request.request_cookiejar is pot_request.request_cookiejar
        assert copied_request.request_proxy == pot_request.request_proxy
        assert copied_request.request_headers == pot_request.request_headers
        assert copied_request.request_headers is not pot_request.request_headers
        assert copied_request.request_timeout == pot_request.request_timeout
        assert copied_request.request_source_address == pot_request.request_source_address
        assert copied_request.request_verify_tls == pot_request.request_verify_tls
        assert copied_request.bypass_cache == pot_request.bypass_cache
|
||||||
|
|
||||||
|
|
||||||
|
def test_provider_bug_report_message(ie, logger):
    """provider_bug_report_message prefixes '; ' by default, or a custom leading text."""
    provider = ExamplePTP(ie=ie, logger=logger, settings={})
    assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'

    message = provider_bug_report_message(provider)
    assert message == '; please report this issue to the provider developer at https://example.com/issues .'

    message_before = provider_bug_report_message(provider, before='custom message!')
    assert message_before == 'custom message! Please report this issue to the provider developer at https://example.com/issues .'
|
||||||
|
|
||||||
|
|
||||||
|
def test_register_provider(ie):
    """@register_provider adds the class to the registry keyed by its PROVIDER_KEY."""

    @register_provider
    class UnavailableProviderPTP(PoTokenProvider):
        def is_available(self) -> bool:
            return False

        def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
            raise PoTokenProviderRejectedRequest('Not implemented')

    assert _pot_providers.value.get('UnavailableProvider') == UnavailableProviderPTP
    # Clean up the global registry so other tests are unaffected
    _pot_providers.value.pop('UnavailableProvider')
|
||||||
|
|
||||||
|
|
||||||
|
def test_register_pot_preference(ie):
    """@register_preference appends one entry to the PTP preference registry."""
    before = len(_ptp_preferences.value)

    @register_preference(ExamplePTP)
    def unavailable_preference(provider: PoTokenProvider, request: PoTokenRequest):
        return 1

    assert len(_ptp_preferences.value) == before + 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_register_cache_provider(ie):
    """@cache.register_provider adds the cache provider to its registry by PROVIDER_KEY."""

    @cache.register_provider
    class UnavailableCacheProviderPCP(PoTokenCacheProvider):
        def is_available(self) -> bool:
            return False

        def get(self, key: str):
            return 'example-cache'

        def store(self, key: str, value: str, expires_at: int):
            pass

        def delete(self, key: str):
            pass

    assert _pot_cache_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCP
    # Clean up the global registry so other tests are unaffected
    _pot_cache_providers.value.pop('UnavailableCacheProvider')
|
||||||
|
|
||||||
|
|
||||||
|
def test_register_cache_provider_spec(ie):
    """@cache.register_spec adds the cache-spec provider to its registry by PROVIDER_KEY."""

    @cache.register_spec
    class UnavailableCacheProviderPCSP(PoTokenCacheSpecProvider):
        def is_available(self) -> bool:
            return False

        def generate_cache_spec(self, request: PoTokenRequest):
            return None

    assert _pot_pcs_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCSP
    # Clean up the global registry so other tests are unaffected
    _pot_pcs_providers.value.pop('UnavailableCacheProvider')
|
||||||
|
|
||||||
|
|
||||||
|
def test_register_cache_provider_preference(ie):
    """@cache.register_preference appends one entry to the cache-provider preference registry."""
    before = len(_pot_cache_provider_preferences.value)

    @cache.register_preference(ExampleCacheProviderPCP)
    def unavailable_preference(provider: PoTokenCacheProvider, request: PoTokenRequest):
        return 1

    assert len(_pot_cache_provider_preferences.value) == before + 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_logger_log_level(logger):
    """LogLevel coerces names case-insensitively and ints; unknown names fall back to INFO."""
    assert logger.LogLevel('INFO') == logger.LogLevel.INFO
    assert logger.LogLevel('debuG') == logger.LogLevel.DEBUG
    assert logger.LogLevel(10) == logger.LogLevel.DEBUG
    assert logger.LogLevel('UNKNOWN') == logger.LogLevel.INFO
|
||||||
@@ -14,7 +14,6 @@
|
|||||||
NRKTVIE,
|
NRKTVIE,
|
||||||
PBSIE,
|
PBSIE,
|
||||||
CeskaTelevizeIE,
|
CeskaTelevizeIE,
|
||||||
ComedyCentralIE,
|
|
||||||
DailymotionIE,
|
DailymotionIE,
|
||||||
DemocracynowIE,
|
DemocracynowIE,
|
||||||
LyndaIE,
|
LyndaIE,
|
||||||
@@ -279,23 +278,6 @@ def test_allsubtitles(self):
|
|||||||
self.assertEqual(md5(subtitles['nl']), 'fc6435027572b63fb4ab143abd5ad3f4')
|
self.assertEqual(md5(subtitles['nl']), 'fc6435027572b63fb4ab143abd5ad3f4')
|
||||||
|
|
||||||
|
|
||||||
@is_download_test
|
|
||||||
@unittest.skip('IE broken')
|
|
||||||
class TestMTVSubtitles(BaseTestSubtitles):
|
|
||||||
url = 'http://www.cc.com/video-clips/p63lk0/adam-devine-s-house-party-chasing-white-swans'
|
|
||||||
IE = ComedyCentralIE
|
|
||||||
|
|
||||||
def getInfoDict(self):
|
|
||||||
return super().getInfoDict()['entries'][0]
|
|
||||||
|
|
||||||
def test_allsubtitles(self):
|
|
||||||
self.DL.params['writesubtitles'] = True
|
|
||||||
self.DL.params['allsubtitles'] = True
|
|
||||||
subtitles = self.getSubtitles()
|
|
||||||
self.assertEqual(set(subtitles.keys()), {'en'})
|
|
||||||
self.assertEqual(md5(subtitles['en']), '78206b8d8a0cfa9da64dc026eea48961')
|
|
||||||
|
|
||||||
|
|
||||||
@is_download_test
|
@is_download_test
|
||||||
class TestNRKSubtitles(BaseTestSubtitles):
|
class TestNRKSubtitles(BaseTestSubtitles):
|
||||||
url = 'http://tv.nrk.no/serie/ikke-gjoer-dette-hjemme/DMPV73000411/sesong-2/episode-1'
|
url = 'http://tv.nrk.no/serie/ikke-gjoer-dette-hjemme/DMPV73000411/sesong-2/episode-1'
|
||||||
|
|||||||
@@ -416,18 +416,8 @@ def test_traversal_unbranching(self):
|
|||||||
'`any` should allow further branching'
|
'`any` should allow further branching'
|
||||||
|
|
||||||
def test_traversal_morsel(self):
|
def test_traversal_morsel(self):
|
||||||
values = {
|
|
||||||
'expires': 'a',
|
|
||||||
'path': 'b',
|
|
||||||
'comment': 'c',
|
|
||||||
'domain': 'd',
|
|
||||||
'max-age': 'e',
|
|
||||||
'secure': 'f',
|
|
||||||
'httponly': 'g',
|
|
||||||
'version': 'h',
|
|
||||||
'samesite': 'i',
|
|
||||||
}
|
|
||||||
morsel = http.cookies.Morsel()
|
morsel = http.cookies.Morsel()
|
||||||
|
values = dict(zip(morsel, 'abcdefghijklmnop'))
|
||||||
morsel.set('item_key', 'item_value', 'coded_value')
|
morsel.set('item_key', 'item_value', 'coded_value')
|
||||||
morsel.update(values)
|
morsel.update(values)
|
||||||
values['key'] = 'item_key'
|
values['key'] = 'item_key'
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
|
|
||||||
|
|
||||||
from test.helper import FakeYDL, report_warning
|
from test.helper import FakeYDL, report_warning
|
||||||
from yt_dlp.update import UpdateInfo, Updater
|
from yt_dlp.update import UpdateInfo, Updater, UPDATE_SOURCES, _make_label
|
||||||
|
|
||||||
|
|
||||||
# XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES
|
# XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES
|
||||||
@@ -84,8 +84,9 @@
|
|||||||
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lock 2024.10.22 py2exe .+
|
lock 2024.10.22 py2exe .+
|
||||||
lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
||||||
lock 2024.10.22 (?!\w+_exe).+ Python 3\.8
|
lock 2024.10.22 zip Python 3\.8
|
||||||
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lock 2025.08.11 darwin_legacy_exe .+
|
||||||
'''
|
'''
|
||||||
|
|
||||||
TEST_LOCKFILE_V2_TMPL = r'''%s
|
TEST_LOCKFILE_V2_TMPL = r'''%s
|
||||||
@@ -94,20 +95,23 @@
|
|||||||
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
|
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 (?!\w+_exe).+ Python 3\.8
|
lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
|
||||||
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 (?!\w+_exe).+ Python 3\.8
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
|
||||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
|
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 (?!\w+_exe).+ Python 3\.8
|
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
|
||||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||||
|
lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
|
||||||
'''
|
'''
|
||||||
|
|
||||||
TEST_LOCKFILE_V2 = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_COMMENT
|
TEST_LOCKFILE_V2 = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_COMMENT
|
||||||
@@ -217,6 +221,10 @@ def test(lockfile, identifier, input_tag, expect_tag, exact=False, repo='yt-dlp/
|
|||||||
test( # linux_aarch64_exe w/glibc2.3 should only update to glibc<2.31 lock
|
test( # linux_aarch64_exe w/glibc2.3 should only update to glibc<2.31 lock
|
||||||
lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.3 (OpenSSL',
|
lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.3 (OpenSSL',
|
||||||
'2025.01.01', '2024.10.22')
|
'2025.01.01', '2024.10.22')
|
||||||
|
test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.11', '2025.08.11')
|
||||||
|
test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.11', '2025.08.11', exact=True)
|
||||||
|
test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.12', '2025.08.11')
|
||||||
|
test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.12', None, exact=True)
|
||||||
|
|
||||||
# Forks can block updates to non-numeric tags rather than lock
|
# Forks can block updates to non-numeric tags rather than lock
|
||||||
test(TEST_LOCKFILE_FORK, 'zip Python 3.6.3', 'pr0000', None, repo='fork/yt-dlp')
|
test(TEST_LOCKFILE_FORK, 'zip Python 3.6.3', 'pr0000', None, repo='fork/yt-dlp')
|
||||||
@@ -272,6 +280,26 @@ def test(target, expected, current_version=None, current_commit=None, identifier
|
|||||||
test('testing', None, current_commit='9' * 40)
|
test('testing', None, current_commit='9' * 40)
|
||||||
test('testing', UpdateInfo('testing', commit='9' * 40))
|
test('testing', UpdateInfo('testing', commit='9' * 40))
|
||||||
|
|
||||||
|
def test_make_label(self):
|
||||||
|
STABLE_REPO = UPDATE_SOURCES['stable']
|
||||||
|
NIGHTLY_REPO = UPDATE_SOURCES['nightly']
|
||||||
|
MASTER_REPO = UPDATE_SOURCES['master']
|
||||||
|
|
||||||
|
for inputs, expected in [
|
||||||
|
([STABLE_REPO, '2025.09.02', '2025.09.02'], f'stable@2025.09.02 from {STABLE_REPO}'),
|
||||||
|
([NIGHTLY_REPO, '2025.09.02.123456', '2025.09.02.123456'], f'nightly@2025.09.02.123456 from {NIGHTLY_REPO}'),
|
||||||
|
([MASTER_REPO, '2025.09.02.987654', '2025.09.02.987654'], f'master@2025.09.02.987654 from {MASTER_REPO}'),
|
||||||
|
(['fork/yt-dlp', 'experimental', '2025.12.31.000000'], 'fork/yt-dlp@experimental build 2025.12.31.000000'),
|
||||||
|
(['fork/yt-dlp', '2025.09.02', '2025.09.02'], 'fork/yt-dlp@2025.09.02'),
|
||||||
|
([STABLE_REPO, 'experimental', '2025.12.31.000000'], f'{STABLE_REPO}@experimental build 2025.12.31.000000'),
|
||||||
|
([STABLE_REPO, 'experimental'], f'{STABLE_REPO}@experimental'),
|
||||||
|
(['fork/yt-dlp', 'experimental'], 'fork/yt-dlp@experimental'),
|
||||||
|
]:
|
||||||
|
result = _make_label(*inputs)
|
||||||
|
self.assertEqual(
|
||||||
|
result, expected,
|
||||||
|
f'{inputs!r} returned {result!r} instead of {expected!r}')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
@@ -12,6 +12,7 @@
|
|||||||
import io
|
import io
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
|
import ntpath
|
||||||
import pickle
|
import pickle
|
||||||
import subprocess
|
import subprocess
|
||||||
import unittest
|
import unittest
|
||||||
@@ -71,6 +72,8 @@
|
|||||||
iri_to_uri,
|
iri_to_uri,
|
||||||
is_html,
|
is_html,
|
||||||
js_to_json,
|
js_to_json,
|
||||||
|
jwt_decode_hs256,
|
||||||
|
jwt_encode,
|
||||||
limit_length,
|
limit_length,
|
||||||
locked_file,
|
locked_file,
|
||||||
lowercase_escape,
|
lowercase_escape,
|
||||||
@@ -99,11 +102,13 @@
|
|||||||
remove_start,
|
remove_start,
|
||||||
render_table,
|
render_table,
|
||||||
replace_extension,
|
replace_extension,
|
||||||
|
datetime_round,
|
||||||
rot47,
|
rot47,
|
||||||
sanitize_filename,
|
sanitize_filename,
|
||||||
sanitize_path,
|
sanitize_path,
|
||||||
sanitize_url,
|
sanitize_url,
|
||||||
shell_quote,
|
shell_quote,
|
||||||
|
strftime_or_none,
|
||||||
smuggle_url,
|
smuggle_url,
|
||||||
str_to_int,
|
str_to_int,
|
||||||
strip_jsonp,
|
strip_jsonp,
|
||||||
@@ -249,12 +254,6 @@ def test_sanitize_path(self):
|
|||||||
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
|
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
|
||||||
self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')
|
self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')
|
||||||
|
|
||||||
# Check with nt._path_normpath if available
|
|
||||||
try:
|
|
||||||
from nt import _path_normpath as nt_path_normpath
|
|
||||||
except ImportError:
|
|
||||||
nt_path_normpath = None
|
|
||||||
|
|
||||||
for test, expected in [
|
for test, expected in [
|
||||||
('C:\\', 'C:\\'),
|
('C:\\', 'C:\\'),
|
||||||
('../abc', '..\\abc'),
|
('../abc', '..\\abc'),
|
||||||
@@ -272,8 +271,7 @@ def test_sanitize_path(self):
|
|||||||
result = sanitize_path(test)
|
result = sanitize_path(test)
|
||||||
assert result == expected, f'{test} was incorrectly resolved'
|
assert result == expected, f'{test} was incorrectly resolved'
|
||||||
assert result == sanitize_path(result), f'{test} changed after sanitizing again'
|
assert result == sanitize_path(result), f'{test} changed after sanitizing again'
|
||||||
if nt_path_normpath:
|
assert result == ntpath.normpath(test), f'{test} does not match ntpath.normpath'
|
||||||
assert result == nt_path_normpath(test), f'{test} does not match nt._path_normpath'
|
|
||||||
|
|
||||||
def test_sanitize_url(self):
|
def test_sanitize_url(self):
|
||||||
self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
|
self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
|
||||||
@@ -407,6 +405,25 @@ def test_datetime_from_str(self):
|
|||||||
self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
|
self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
|
||||||
self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
|
self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
|
||||||
|
|
||||||
|
def test_datetime_round(self):
|
||||||
|
self.assertEqual(datetime_round(dt.datetime.strptime('1820-05-12T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ')),
|
||||||
|
dt.datetime(1820, 5, 12, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(datetime_round(dt.datetime.strptime('1969-12-31T23:34:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'hour'),
|
||||||
|
dt.datetime(1970, 1, 1, 0, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'minute'),
|
||||||
|
dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.123Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
|
||||||
|
dt.datetime(2024, 12, 25, 1, 23, 45, tzinfo=dt.timezone.utc))
|
||||||
|
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.678Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
|
||||||
|
dt.datetime(2024, 12, 25, 1, 23, 46, tzinfo=dt.timezone.utc))
|
||||||
|
|
||||||
|
def test_strftime_or_none(self):
|
||||||
|
self.assertEqual(strftime_or_none(-4722192000), '18200512')
|
||||||
|
self.assertEqual(strftime_or_none(0), '19700101')
|
||||||
|
self.assertEqual(strftime_or_none(1735084800), '20241225')
|
||||||
|
# Throws OverflowError
|
||||||
|
self.assertEqual(strftime_or_none(1735084800000), None)
|
||||||
|
|
||||||
def test_daterange(self):
|
def test_daterange(self):
|
||||||
_20century = DateRange('19000101', '20000101')
|
_20century = DateRange('19000101', '20000101')
|
||||||
self.assertFalse('17890714' in _20century)
|
self.assertFalse('17890714' in _20century)
|
||||||
@@ -1373,6 +1390,7 @@ def test_parse_resolution(self):
|
|||||||
self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
|
self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
|
||||||
self.assertEqual(parse_resolution('ep1x2'), {})
|
self.assertEqual(parse_resolution('ep1x2'), {})
|
||||||
self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
|
self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
|
||||||
|
self.assertEqual(parse_resolution('1920w', lenient=True), {'width': 1920})
|
||||||
|
|
||||||
def test_parse_bitrate(self):
|
def test_parse_bitrate(self):
|
||||||
self.assertEqual(parse_bitrate(None), None)
|
self.assertEqual(parse_bitrate(None), None)
|
||||||
@@ -2179,6 +2197,41 @@ def test_partial_application(self):
|
|||||||
assert int_or_none(v=10) == 10, 'keyword passed positional should call function'
|
assert int_or_none(v=10) == 10, 'keyword passed positional should call function'
|
||||||
assert int_or_none(scale=0.1)(10) == 100, 'call after partial application should call the function'
|
assert int_or_none(scale=0.1)(10) == 100, 'call after partial application should call the function'
|
||||||
|
|
||||||
|
_JWT_KEY = '12345678'
|
||||||
|
_JWT_HEADERS_1 = {'a': 'b'}
|
||||||
|
_JWT_HEADERS_2 = {'typ': 'JWT', 'alg': 'HS256'}
|
||||||
|
_JWT_HEADERS_3 = {'typ': 'JWT', 'alg': 'RS256'}
|
||||||
|
_JWT_HEADERS_4 = {'c': 'd', 'alg': 'ES256'}
|
||||||
|
_JWT_DECODED = {
|
||||||
|
'foo': 'bar',
|
||||||
|
'qux': 'baz',
|
||||||
|
}
|
||||||
|
_JWT_SIMPLE = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.fKojvTWqnjNTbsdoDTmYNc4tgYAG3h_SWRzM77iLH0U'
|
||||||
|
_JWT_WITH_EXTRA_HEADERS = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImEiOiJiIn0.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.Ia91-B77yasfYM7jsB6iVKLew-3rO6ITjNmjWUVXCvQ'
|
||||||
|
_JWT_WITH_REORDERED_HEADERS = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.slg-7COta5VOfB36p3tqV4MGPV6TTA_ouGnD48UEVq4'
|
||||||
|
_JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.XWp496oVgQnoits0OOocutdjxoaQwn4GUWWxUsKENPM'
|
||||||
|
_JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG = 'eyJhbGciOiJFUzI1NiIsInR5cCI6IkpXVCIsImMiOiJkIn0.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.oM_tc7IkfrwkoRh43rFFE1wOi3J3mQGwx7_lMyKQqDg'
|
||||||
|
|
||||||
|
def test_jwt_encode(self):
|
||||||
|
def test(expected, headers={}):
|
||||||
|
self.assertEqual(jwt_encode(self._JWT_DECODED, self._JWT_KEY, headers=headers), expected)
|
||||||
|
|
||||||
|
test(self._JWT_SIMPLE)
|
||||||
|
test(self._JWT_WITH_EXTRA_HEADERS, headers=self._JWT_HEADERS_1)
|
||||||
|
test(self._JWT_WITH_REORDERED_HEADERS, headers=self._JWT_HEADERS_2)
|
||||||
|
test(self._JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG, headers=self._JWT_HEADERS_3)
|
||||||
|
test(self._JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG, headers=self._JWT_HEADERS_4)
|
||||||
|
|
||||||
|
def test_jwt_decode_hs256(self):
|
||||||
|
def test(inp):
|
||||||
|
self.assertEqual(jwt_decode_hs256(inp), self._JWT_DECODED)
|
||||||
|
|
||||||
|
test(self._JWT_SIMPLE)
|
||||||
|
test(self._JWT_WITH_EXTRA_HEADERS)
|
||||||
|
test(self._JWT_WITH_REORDERED_HEADERS)
|
||||||
|
test(self._JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG)
|
||||||
|
test(self._JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ def test_private_info_arg(self):
|
|||||||
'--username', 'johnsmith@gmail.com',
|
'--username', 'johnsmith@gmail.com',
|
||||||
'--password', 'my_secret_password',
|
'--password', 'my_secret_password',
|
||||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
sout, serr = outp.communicate()
|
_, serr = outp.communicate()
|
||||||
self.assertTrue(b'--username' in serr)
|
self.assertTrue(b'--username' in serr)
|
||||||
self.assertTrue(b'johnsmith' not in serr)
|
self.assertTrue(b'johnsmith' not in serr)
|
||||||
self.assertTrue(b'--password' in serr)
|
self.assertTrue(b'--password' in serr)
|
||||||
@@ -36,7 +36,7 @@ def test_private_info_shortarg(self):
|
|||||||
'-u', 'johnsmith@gmail.com',
|
'-u', 'johnsmith@gmail.com',
|
||||||
'-p', 'my_secret_password',
|
'-p', 'my_secret_password',
|
||||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
sout, serr = outp.communicate()
|
_, serr = outp.communicate()
|
||||||
self.assertTrue(b'-u' in serr)
|
self.assertTrue(b'-u' in serr)
|
||||||
self.assertTrue(b'johnsmith' not in serr)
|
self.assertTrue(b'johnsmith' not in serr)
|
||||||
self.assertTrue(b'-p' in serr)
|
self.assertTrue(b'-p' in serr)
|
||||||
@@ -50,7 +50,7 @@ def test_private_info_eq(self):
|
|||||||
'--username=johnsmith@gmail.com',
|
'--username=johnsmith@gmail.com',
|
||||||
'--password=my_secret_password',
|
'--password=my_secret_password',
|
||||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
sout, serr = outp.communicate()
|
_, serr = outp.communicate()
|
||||||
self.assertTrue(b'--username' in serr)
|
self.assertTrue(b'--username' in serr)
|
||||||
self.assertTrue(b'johnsmith' not in serr)
|
self.assertTrue(b'johnsmith' not in serr)
|
||||||
self.assertTrue(b'--password' in serr)
|
self.assertTrue(b'--password' in serr)
|
||||||
@@ -64,7 +64,7 @@ def test_private_info_shortarg_eq(self):
|
|||||||
'-u=johnsmith@gmail.com',
|
'-u=johnsmith@gmail.com',
|
||||||
'-p=my_secret_password',
|
'-p=my_secret_password',
|
||||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
sout, serr = outp.communicate()
|
_, serr = outp.communicate()
|
||||||
self.assertTrue(b'-u' in serr)
|
self.assertTrue(b'-u' in serr)
|
||||||
self.assertTrue(b'johnsmith' not in serr)
|
self.assertTrue(b'johnsmith' not in serr)
|
||||||
self.assertTrue(b'-p' in serr)
|
self.assertTrue(b'-p' in serr)
|
||||||
|
|||||||
@@ -20,7 +20,7 @@
|
|||||||
import ssl
|
import ssl
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
from yt_dlp import socks, traverse_obj
|
from yt_dlp import socks
|
||||||
from yt_dlp.cookies import YoutubeDLCookieJar
|
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||||
from yt_dlp.dependencies import websockets
|
from yt_dlp.dependencies import websockets
|
||||||
from yt_dlp.networking import Request
|
from yt_dlp.networking import Request
|
||||||
@@ -32,6 +32,7 @@
|
|||||||
SSLError,
|
SSLError,
|
||||||
TransportError,
|
TransportError,
|
||||||
)
|
)
|
||||||
|
from yt_dlp.utils.traversal import traverse_obj
|
||||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||||
|
|
||||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
|||||||
@@ -1,77 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# Allow direct execution
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
||||||
|
|
||||||
|
|
||||||
import xml.etree.ElementTree
|
|
||||||
|
|
||||||
import yt_dlp.extractor
|
|
||||||
import yt_dlp.YoutubeDL
|
|
||||||
from test.helper import get_params, is_download_test, try_rm
|
|
||||||
|
|
||||||
|
|
||||||
class YoutubeDL(yt_dlp.YoutubeDL):
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
self.to_stderr = self.to_screen
|
|
||||||
|
|
||||||
|
|
||||||
params = get_params({
|
|
||||||
'writeannotations': True,
|
|
||||||
'skip_download': True,
|
|
||||||
'writeinfojson': False,
|
|
||||||
'format': 'flv',
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
TEST_ID = 'gr51aVj-mLg'
|
|
||||||
ANNOTATIONS_FILE = TEST_ID + '.annotations.xml'
|
|
||||||
EXPECTED_ANNOTATIONS = ['Speech bubble', 'Note', 'Title', 'Spotlight', 'Label']
|
|
||||||
|
|
||||||
|
|
||||||
@is_download_test
|
|
||||||
class TestAnnotations(unittest.TestCase):
|
|
||||||
def setUp(self):
|
|
||||||
# Clear old files
|
|
||||||
self.tearDown()
|
|
||||||
|
|
||||||
def test_info_json(self):
|
|
||||||
expected = list(EXPECTED_ANNOTATIONS) # Two annotations could have the same text.
|
|
||||||
ie = yt_dlp.extractor.YoutubeIE()
|
|
||||||
ydl = YoutubeDL(params)
|
|
||||||
ydl.add_info_extractor(ie)
|
|
||||||
ydl.download([TEST_ID])
|
|
||||||
self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
|
|
||||||
annoxml = None
|
|
||||||
with open(ANNOTATIONS_FILE, encoding='utf-8') as annof:
|
|
||||||
annoxml = xml.etree.ElementTree.parse(annof)
|
|
||||||
self.assertTrue(annoxml is not None, 'Failed to parse annotations XML')
|
|
||||||
root = annoxml.getroot()
|
|
||||||
self.assertEqual(root.tag, 'document')
|
|
||||||
annotationsTag = root.find('annotations')
|
|
||||||
self.assertEqual(annotationsTag.tag, 'annotations')
|
|
||||||
annotations = annotationsTag.findall('annotation')
|
|
||||||
|
|
||||||
# Not all the annotations have TEXT children and the annotations are returned unsorted.
|
|
||||||
for a in annotations:
|
|
||||||
self.assertEqual(a.tag, 'annotation')
|
|
||||||
if a.get('type') == 'text':
|
|
||||||
textTag = a.find('TEXT')
|
|
||||||
text = textTag.text
|
|
||||||
self.assertTrue(text in expected) # assertIn only added in python 2.7
|
|
||||||
# remove the first occurrence, there could be more than one annotation with the same text
|
|
||||||
expected.remove(text)
|
|
||||||
# We should have seen (and removed) all the expected annotation texts.
|
|
||||||
self.assertEqual(len(expected), 0, 'Not all expected annotations were found.')
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
try_rm(ANNOTATIONS_FILE)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
@@ -133,6 +133,26 @@
|
|||||||
'2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
|
'2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
|
||||||
'IAOAOq0QJ8wRAAgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_E2u-m37KtXJoOySqa0',
|
'IAOAOq0QJ8wRAAgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_E2u-m37KtXJoOySqa0',
|
||||||
),
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/e12fbea4/player_ias.vflset/en_US/base.js',
|
||||||
|
'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
|
||||||
|
'JC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3DqEctUw-NYdNmOEvaepit0zJAtIEsgOV2SXZjhSHMNy0NXNG_1kOyBf6HPuAuCduh-a',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/010fbc8d/player_es5.vflset/en_US/base.js',
|
||||||
|
'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
|
||||||
|
'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3DqEctUw-NYdNmOEvaepit2zJAsIEggOVaSXZjhSHMNy0NXNG_1kOyBf6HPuAuCduh-',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/010fbc8d/player_es6.vflset/en_US/base.js',
|
||||||
|
'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
|
||||||
|
'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3DqEctUw-NYdNmOEvaepit2zJAsIEggOVaSXZjhSHMNy0NXNG_1kOyBf6HPuAuCduh-',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/5ec65609/player_ias_tcc.vflset/en_US/base.js',
|
||||||
|
'AAJAJfQdSswRAIgNSN0GDUcHnCIXkKcF61yLBgDHiX1sUhOJdY4_GxunRYCIDeYNYP_16mQTPm5f1OVq3oV1ijUNYPjP4iUSMAjO9bZ',
|
||||||
|
'AJfQdSswRAIgNSN0GDUcHnCIXkKcF61ZLBgDHiX1sUhOJdY4_GxunRYCIDyYNYP_16mQTPm5f1OVq3oV1ijUNYPjP4iUSMAjO9be',
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
_NSIG_TESTS = [
|
_NSIG_TESTS = [
|
||||||
@@ -316,6 +336,74 @@
|
|||||||
'https://www.youtube.com/s/player/8a8ac953/tv-player-es6.vflset/tv-player-es6.js',
|
'https://www.youtube.com/s/player/8a8ac953/tv-player-es6.vflset/tv-player-es6.js',
|
||||||
'MiBYeXx_vRREbiCCmh', 'RtZYMVvmkE0JE',
|
'MiBYeXx_vRREbiCCmh', 'RtZYMVvmkE0JE',
|
||||||
),
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/59b252b9/player_ias.vflset/en_US/base.js',
|
||||||
|
'D3XWVpYgwhLLKNK4AGX', 'aZrQ1qWJ5yv5h',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/fc2a56a5/player_ias.vflset/en_US/base.js',
|
||||||
|
'qTKWg_Il804jd2kAC', 'OtUAm2W6gyzJjB9u',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/fc2a56a5/tv-player-ias.vflset/tv-player-ias.js',
|
||||||
|
'qTKWg_Il804jd2kAC', 'OtUAm2W6gyzJjB9u',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/a74bf670/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', 'hQP7k1hA22OrNTnq',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/6275f73c/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', '-I03XF0iyf6I_X0A',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/20c72c18/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', '-I03XF0iyf6I_X0A',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/9fe2e06e/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', '6r5ekNIiEMPutZy',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/680f8c75/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', '0ml9caTwpa55Jf',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/14397202/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', 'ozZFAN21okDdJTa',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/5dcb2c1f/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', 'p7iTbRZDYAF',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/a10d7fcc/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', '9Zue7DDHJSD',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/8e20cb06/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', '5-4tTneTROTpMzba',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/e12fbea4/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'kM5r52fugSZRAKHfo3', 'XkeRfXIPOkSwfg',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/ef259203/player_ias_tce.vflset/en_US/base.js',
|
||||||
|
'rPqBC01nJpqhhi2iA2U', 'hY7dbiKFT51UIA',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/010fbc8d/player_es5.vflset/en_US/base.js',
|
||||||
|
'0hlOAlqjFszVvF4Z', 'R-H23bZGAsRFTg',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/010fbc8d/player_es6.vflset/en_US/base.js',
|
||||||
|
'0hlOAlqjFszVvF4Z', 'R-H23bZGAsRFTg',
|
||||||
|
),
|
||||||
|
(
|
||||||
|
'https://www.youtube.com/s/player/5ec65609/player_ias_tcc.vflset/en_US/base.js',
|
||||||
|
'6l5CTNx4AzIqH4MXM', 'NupToduxHBew1g',
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
BIN
test/testdata/thumbnails/foo %d bar/foo_%d.webp
vendored
BIN
test/testdata/thumbnails/foo %d bar/foo_%d.webp
vendored
Binary file not shown.
|
Before Width: | Height: | Size: 3.8 KiB |
0
test/testdata/thumbnails/foo %d bar/placeholder
vendored
Normal file
0
test/testdata/thumbnails/foo %d bar/placeholder
vendored
Normal file
@@ -36,6 +36,7 @@
|
|||||||
from .globals import (
|
from .globals import (
|
||||||
IN_CLI,
|
IN_CLI,
|
||||||
LAZY_EXTRACTORS,
|
LAZY_EXTRACTORS,
|
||||||
|
WINDOWS_VT_MODE,
|
||||||
plugin_ies,
|
plugin_ies,
|
||||||
plugin_ies_overrides,
|
plugin_ies_overrides,
|
||||||
plugin_pps,
|
plugin_pps,
|
||||||
@@ -52,7 +53,7 @@
|
|||||||
SSLError,
|
SSLError,
|
||||||
network_exceptions,
|
network_exceptions,
|
||||||
)
|
)
|
||||||
from .networking.impersonate import ImpersonateRequestHandler
|
from .networking.impersonate import ImpersonateRequestHandler, ImpersonateTarget
|
||||||
from .plugins import directories as plugin_directories, load_all_plugins
|
from .plugins import directories as plugin_directories, load_all_plugins
|
||||||
from .postprocessor import (
|
from .postprocessor import (
|
||||||
EmbedThumbnailPP,
|
EmbedThumbnailPP,
|
||||||
@@ -72,6 +73,7 @@
|
|||||||
from .update import (
|
from .update import (
|
||||||
REPOSITORY,
|
REPOSITORY,
|
||||||
_get_system_deprecation,
|
_get_system_deprecation,
|
||||||
|
_get_outdated_warning,
|
||||||
_make_label,
|
_make_label,
|
||||||
current_git_head,
|
current_git_head,
|
||||||
detect_variant,
|
detect_variant,
|
||||||
@@ -302,7 +304,6 @@ class YoutubeDL:
|
|||||||
clean_infojson: Remove internal metadata from the infojson
|
clean_infojson: Remove internal metadata from the infojson
|
||||||
getcomments: Extract video comments. This will not be written to disk
|
getcomments: Extract video comments. This will not be written to disk
|
||||||
unless writeinfojson is also given
|
unless writeinfojson is also given
|
||||||
writeannotations: Write the video annotations to a .annotations.xml file
|
|
||||||
writethumbnail: Write the thumbnail image to a file
|
writethumbnail: Write the thumbnail image to a file
|
||||||
allow_playlist_files: Whether to write playlists' description, infojson etc
|
allow_playlist_files: Whether to write playlists' description, infojson etc
|
||||||
also to disk when using the 'write*' options
|
also to disk when using the 'write*' options
|
||||||
@@ -482,7 +483,8 @@ class YoutubeDL:
|
|||||||
The following options do not work when used through the API:
|
The following options do not work when used through the API:
|
||||||
filename, abort-on-error, multistreams, no-live-chat,
|
filename, abort-on-error, multistreams, no-live-chat,
|
||||||
format-sort, no-clean-infojson, no-playlist-metafiles,
|
format-sort, no-clean-infojson, no-playlist-metafiles,
|
||||||
no-keep-subs, no-attach-info-json, allow-unsafe-ext, prefer-vp9-sort.
|
no-keep-subs, no-attach-info-json, allow-unsafe-ext, prefer-vp9-sort,
|
||||||
|
mtime-by-default.
|
||||||
Refer __init__.py for their implementation
|
Refer __init__.py for their implementation
|
||||||
progress_template: Dictionary of templates for progress outputs.
|
progress_template: Dictionary of templates for progress outputs.
|
||||||
Allowed keys are 'download', 'postprocess',
|
Allowed keys are 'download', 'postprocess',
|
||||||
@@ -490,7 +492,7 @@ class YoutubeDL:
|
|||||||
The template is mapped on a dictionary with keys 'progress' and 'info'
|
The template is mapped on a dictionary with keys 'progress' and 'info'
|
||||||
retry_sleep_functions: Dictionary of functions that takes the number of attempts
|
retry_sleep_functions: Dictionary of functions that takes the number of attempts
|
||||||
as argument and returns the time to sleep in seconds.
|
as argument and returns the time to sleep in seconds.
|
||||||
Allowed keys are 'http', 'fragment', 'file_access'
|
Allowed keys are 'http', 'fragment', 'file_access', 'extractor'
|
||||||
download_ranges: A callback function that gets called for every video with
|
download_ranges: A callback function that gets called for every video with
|
||||||
the signature (info_dict, ydl) -> Iterable[Section].
|
the signature (info_dict, ydl) -> Iterable[Section].
|
||||||
Only the returned sections will be downloaded.
|
Only the returned sections will be downloaded.
|
||||||
@@ -502,16 +504,17 @@ class YoutubeDL:
|
|||||||
force_keyframes_at_cuts: Re-encode the video when downloading ranges to get precise cuts
|
force_keyframes_at_cuts: Re-encode the video when downloading ranges to get precise cuts
|
||||||
noprogress: Do not print the progress bar
|
noprogress: Do not print the progress bar
|
||||||
live_from_start: Whether to download livestreams videos from the start
|
live_from_start: Whether to download livestreams videos from the start
|
||||||
|
warn_when_outdated: Emit a warning if the yt-dlp version is older than 90 days
|
||||||
|
|
||||||
The following parameters are not used by YoutubeDL itself, they are used by
|
The following parameters are not used by YoutubeDL itself, they are used by
|
||||||
the downloader (see yt_dlp/downloader/common.py):
|
the downloader (see yt_dlp/downloader/common.py):
|
||||||
nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
|
nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
|
||||||
max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries,
|
max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries,
|
||||||
continuedl, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
|
continuedl, hls_use_mpegts, http_chunk_size, external_downloader_args,
|
||||||
external_downloader_args, concurrent_fragment_downloads, progress_delta.
|
concurrent_fragment_downloads, progress_delta.
|
||||||
|
|
||||||
The following options are used by the post processors:
|
The following options are used by the post processors:
|
||||||
ffmpeg_location: Location of the ffmpeg/avconv binary; either the path
|
ffmpeg_location: Location of the ffmpeg binary; either the path
|
||||||
to the binary or its containing directory.
|
to the binary or its containing directory.
|
||||||
postprocessor_args: A dictionary of postprocessor/executable keys (in lower case)
|
postprocessor_args: A dictionary of postprocessor/executable keys (in lower case)
|
||||||
and a list of additional command-line arguments for the
|
and a list of additional command-line arguments for the
|
||||||
@@ -528,6 +531,7 @@ class YoutubeDL:
|
|||||||
discontinuities such as ad breaks (default: False)
|
discontinuities such as ad breaks (default: False)
|
||||||
extractor_args: A dictionary of arguments to be passed to the extractors.
|
extractor_args: A dictionary of arguments to be passed to the extractors.
|
||||||
See "EXTRACTOR ARGUMENTS" for details.
|
See "EXTRACTOR ARGUMENTS" for details.
|
||||||
|
Argument values must always be a list of string(s).
|
||||||
E.g. {'youtube': {'skip': ['dash', 'hls']}}
|
E.g. {'youtube': {'skip': ['dash', 'hls']}}
|
||||||
mark_watched: Mark videos watched (even with --simulate). Only for YouTube
|
mark_watched: Mark videos watched (even with --simulate). Only for YouTube
|
||||||
|
|
||||||
@@ -561,32 +565,14 @@ class YoutubeDL:
|
|||||||
allsubtitles: - Use subtitleslangs = ['all']
|
allsubtitles: - Use subtitleslangs = ['all']
|
||||||
Downloads all the subtitles of the video
|
Downloads all the subtitles of the video
|
||||||
(requires writesubtitles or writeautomaticsub)
|
(requires writesubtitles or writeautomaticsub)
|
||||||
include_ads: - Doesn't work
|
|
||||||
Download ads as well
|
|
||||||
call_home: - Not implemented
|
|
||||||
Boolean, true if we are allowed to contact the
|
|
||||||
yt-dlp servers for debugging.
|
|
||||||
post_hooks: - Register a custom postprocessor
|
post_hooks: - Register a custom postprocessor
|
||||||
A list of functions that get called as the final step
|
A list of functions that get called as the final step
|
||||||
for each video file, after all postprocessors have been
|
for each video file, after all postprocessors have been
|
||||||
called. The filename will be passed as the only argument.
|
called. The filename will be passed as the only argument.
|
||||||
hls_prefer_native: - Use external_downloader = {'m3u8': 'native'} or {'m3u8': 'ffmpeg'}.
|
hls_prefer_native: - Use external_downloader = {'m3u8': 'native'} or {'m3u8': 'ffmpeg'}.
|
||||||
Use the native HLS downloader instead of ffmpeg/avconv
|
Use the native HLS downloader instead of ffmpeg
|
||||||
if True, otherwise use ffmpeg/avconv if False, otherwise
|
if True, otherwise use ffmpeg if False, otherwise
|
||||||
use downloader suggested by extractor if None.
|
use downloader suggested by extractor if None.
|
||||||
prefer_ffmpeg: - avconv support is deprecated
|
|
||||||
If False, use avconv instead of ffmpeg if both are available,
|
|
||||||
otherwise prefer ffmpeg.
|
|
||||||
youtube_include_dash_manifest: - Use extractor_args
|
|
||||||
If True (default), DASH manifests and related
|
|
||||||
data will be downloaded and processed by extractor.
|
|
||||||
You can reduce network I/O by disabling it if you don't
|
|
||||||
care about DASH. (only for youtube)
|
|
||||||
youtube_include_hls_manifest: - Use extractor_args
|
|
||||||
If True (default), HLS manifests and related
|
|
||||||
data will be downloaded and processed by extractor.
|
|
||||||
You can reduce network I/O by disabling it if you don't
|
|
||||||
care about HLS. (only for youtube)
|
|
||||||
no_color: Same as `color='no_color'`
|
no_color: Same as `color='no_color'`
|
||||||
no_overwrites: Same as `overwrites=False`
|
no_overwrites: Same as `overwrites=False`
|
||||||
"""
|
"""
|
||||||
@@ -594,7 +580,7 @@ class YoutubeDL:
|
|||||||
_NUMERIC_FIELDS = {
|
_NUMERIC_FIELDS = {
|
||||||
'width', 'height', 'asr', 'audio_channels', 'fps',
|
'width', 'height', 'asr', 'audio_channels', 'fps',
|
||||||
'tbr', 'abr', 'vbr', 'filesize', 'filesize_approx',
|
'tbr', 'abr', 'vbr', 'filesize', 'filesize_approx',
|
||||||
'timestamp', 'release_timestamp',
|
'timestamp', 'release_timestamp', 'available_at',
|
||||||
'duration', 'view_count', 'like_count', 'dislike_count', 'repost_count',
|
'duration', 'view_count', 'like_count', 'dislike_count', 'repost_count',
|
||||||
'average_rating', 'comment_count', 'age_limit',
|
'average_rating', 'comment_count', 'age_limit',
|
||||||
'start_time', 'end_time',
|
'start_time', 'end_time',
|
||||||
@@ -604,13 +590,13 @@ class YoutubeDL:
|
|||||||
|
|
||||||
_format_fields = {
|
_format_fields = {
|
||||||
# NB: Keep in sync with the docstring of extractor/common.py
|
# NB: Keep in sync with the docstring of extractor/common.py
|
||||||
'url', 'manifest_url', 'manifest_stream_number', 'ext', 'format', 'format_id', 'format_note',
|
'url', 'manifest_url', 'manifest_stream_number', 'ext', 'format', 'format_id', 'format_note', 'available_at',
|
||||||
'width', 'height', 'aspect_ratio', 'resolution', 'dynamic_range', 'tbr', 'abr', 'acodec', 'asr', 'audio_channels',
|
'width', 'height', 'aspect_ratio', 'resolution', 'dynamic_range', 'tbr', 'abr', 'acodec', 'asr', 'audio_channels',
|
||||||
'vbr', 'fps', 'vcodec', 'container', 'filesize', 'filesize_approx', 'rows', 'columns', 'hls_media_playlist_data',
|
'vbr', 'fps', 'vcodec', 'container', 'filesize', 'filesize_approx', 'rows', 'columns', 'hls_media_playlist_data',
|
||||||
'player_url', 'protocol', 'fragment_base_url', 'fragments', 'is_from_start', 'is_dash_periods', 'request_data',
|
'player_url', 'protocol', 'fragment_base_url', 'fragments', 'is_from_start', 'is_dash_periods', 'request_data',
|
||||||
'preference', 'language', 'language_preference', 'quality', 'source_preference', 'cookies',
|
'preference', 'language', 'language_preference', 'quality', 'source_preference', 'cookies',
|
||||||
'http_headers', 'stretched_ratio', 'no_resume', 'has_drm', 'extra_param_to_segment_url', 'extra_param_to_key_url',
|
'http_headers', 'stretched_ratio', 'no_resume', 'has_drm', 'extra_param_to_segment_url', 'extra_param_to_key_url',
|
||||||
'hls_aes', 'downloader_options', 'page_url', 'app', 'play_path', 'tc_url', 'flash_version',
|
'hls_aes', 'downloader_options', 'impersonate', 'page_url', 'app', 'play_path', 'tc_url', 'flash_version',
|
||||||
'rtmp_live', 'rtmp_conn', 'rtmp_protocol', 'rtmp_real_time',
|
'rtmp_live', 'rtmp_conn', 'rtmp_protocol', 'rtmp_real_time',
|
||||||
}
|
}
|
||||||
_deprecated_multivalue_fields = {
|
_deprecated_multivalue_fields = {
|
||||||
@@ -640,6 +626,7 @@ def __init__(self, params=None, auto_init=True):
|
|||||||
self._printed_messages = set()
|
self._printed_messages = set()
|
||||||
self._first_webpage_request = True
|
self._first_webpage_request = True
|
||||||
self._post_hooks = []
|
self._post_hooks = []
|
||||||
|
self._close_hooks = []
|
||||||
self._progress_hooks = []
|
self._progress_hooks = []
|
||||||
self._postprocessor_hooks = []
|
self._postprocessor_hooks = []
|
||||||
self._download_retcode = 0
|
self._download_retcode = 0
|
||||||
@@ -699,6 +686,9 @@ def process_color_policy(stream):
|
|||||||
system_deprecation = _get_system_deprecation()
|
system_deprecation = _get_system_deprecation()
|
||||||
if system_deprecation:
|
if system_deprecation:
|
||||||
self.deprecated_feature(system_deprecation.replace('\n', '\n '))
|
self.deprecated_feature(system_deprecation.replace('\n', '\n '))
|
||||||
|
elif self.params.get('warn_when_outdated'):
|
||||||
|
if outdated_warning := _get_outdated_warning():
|
||||||
|
self.report_warning(outdated_warning)
|
||||||
|
|
||||||
if self.params.get('allow_unplayable_formats'):
|
if self.params.get('allow_unplayable_formats'):
|
||||||
self.report_warning(
|
self.report_warning(
|
||||||
@@ -741,12 +731,6 @@ def check_deprecated(param, option, suggestion):
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
|
|
||||||
if self.params.get('geo_verification_proxy') is None:
|
|
||||||
self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']
|
|
||||||
|
|
||||||
check_deprecated('autonumber', '--auto-number', '-o "%(autonumber)s-%(title)s.%(ext)s"')
|
|
||||||
check_deprecated('usetitle', '--title', '-o "%(title)s-%(id)s.%(ext)s"')
|
|
||||||
check_deprecated('useid', '--id', '-o "%(id)s.%(ext)s"')
|
check_deprecated('useid', '--id', '-o "%(id)s.%(ext)s"')
|
||||||
|
|
||||||
for msg in self.params.get('_warnings', []):
|
for msg in self.params.get('_warnings', []):
|
||||||
@@ -908,6 +892,11 @@ def add_post_hook(self, ph):
|
|||||||
"""Add the post hook"""
|
"""Add the post hook"""
|
||||||
self._post_hooks.append(ph)
|
self._post_hooks.append(ph)
|
||||||
|
|
||||||
|
def add_close_hook(self, ch):
|
||||||
|
"""Add a close hook, called when YoutubeDL.close() is called"""
|
||||||
|
assert callable(ch), 'Close hook must be callable'
|
||||||
|
self._close_hooks.append(ch)
|
||||||
|
|
||||||
def add_progress_hook(self, ph):
|
def add_progress_hook(self, ph):
|
||||||
"""Add the download progress hook"""
|
"""Add the download progress hook"""
|
||||||
self._progress_hooks.append(ph)
|
self._progress_hooks.append(ph)
|
||||||
@@ -1016,6 +1005,9 @@ def close(self):
|
|||||||
self._request_director.close()
|
self._request_director.close()
|
||||||
del self._request_director
|
del self._request_director
|
||||||
|
|
||||||
|
for close_hook in self._close_hooks:
|
||||||
|
close_hook()
|
||||||
|
|
||||||
def trouble(self, message=None, tb=None, is_error=True):
|
def trouble(self, message=None, tb=None, is_error=True):
|
||||||
"""Determine action to take when a download problem appears.
|
"""Determine action to take when a download problem appears.
|
||||||
|
|
||||||
@@ -2185,7 +2177,7 @@ def _filter(f):
|
|||||||
return op(actual_value, comparison_value)
|
return op(actual_value, comparison_value)
|
||||||
return _filter
|
return _filter
|
||||||
|
|
||||||
def _check_formats(self, formats):
|
def _check_formats(self, formats, warning=True):
|
||||||
for f in formats:
|
for f in formats:
|
||||||
working = f.get('__working')
|
working = f.get('__working')
|
||||||
if working is not None:
|
if working is not None:
|
||||||
@@ -2198,6 +2190,9 @@ def _check_formats(self, formats):
|
|||||||
continue
|
continue
|
||||||
temp_file = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False, dir=path or None)
|
temp_file = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False, dir=path or None)
|
||||||
temp_file.close()
|
temp_file.close()
|
||||||
|
# If FragmentFD fails when testing a fragment, it will wrongly set a non-zero return code.
|
||||||
|
# Save the actual return code for later. See https://github.com/yt-dlp/yt-dlp/issues/13750
|
||||||
|
original_retcode = self._download_retcode
|
||||||
try:
|
try:
|
||||||
success, _ = self.dl(temp_file.name, f, test=True)
|
success, _ = self.dl(temp_file.name, f, test=True)
|
||||||
except (DownloadError, OSError, ValueError, *network_exceptions):
|
except (DownloadError, OSError, ValueError, *network_exceptions):
|
||||||
@@ -2208,11 +2203,18 @@ def _check_formats(self, formats):
|
|||||||
os.remove(temp_file.name)
|
os.remove(temp_file.name)
|
||||||
except OSError:
|
except OSError:
|
||||||
self.report_warning(f'Unable to delete temporary file "{temp_file.name}"')
|
self.report_warning(f'Unable to delete temporary file "{temp_file.name}"')
|
||||||
|
# Restore the actual return code
|
||||||
|
self._download_retcode = original_retcode
|
||||||
f['__working'] = success
|
f['__working'] = success
|
||||||
if success:
|
if success:
|
||||||
|
f.pop('__needs_testing', None)
|
||||||
yield f
|
yield f
|
||||||
else:
|
else:
|
||||||
self.to_screen('[info] Unable to download format {}. Skipping...'.format(f['format_id']))
|
msg = f'Unable to download format {f["format_id"]}. Skipping...'
|
||||||
|
if warning:
|
||||||
|
self.report_warning(msg)
|
||||||
|
else:
|
||||||
|
self.to_screen(f'[info] {msg}')
|
||||||
|
|
||||||
def _select_formats(self, formats, selector):
|
def _select_formats(self, formats, selector):
|
||||||
return list(selector({
|
return list(selector({
|
||||||
@@ -2692,11 +2694,7 @@ def _fill_common_fields(self, info_dict, final=True):
|
|||||||
('modified_timestamp', 'modified_date'),
|
('modified_timestamp', 'modified_date'),
|
||||||
):
|
):
|
||||||
if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None:
|
if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None:
|
||||||
# Working around out-of-range timestamp values (e.g. negative ones on Windows,
|
info_dict[date_key] = strftime_or_none(info_dict[ts_key])
|
||||||
# see http://bugs.python.org/issue1646728)
|
|
||||||
with contextlib.suppress(ValueError, OverflowError, OSError):
|
|
||||||
upload_date = dt.datetime.fromtimestamp(info_dict[ts_key], dt.timezone.utc)
|
|
||||||
info_dict[date_key] = upload_date.strftime('%Y%m%d')
|
|
||||||
|
|
||||||
if not info_dict.get('release_year'):
|
if not info_dict.get('release_year'):
|
||||||
info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])}))
|
info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])}))
|
||||||
@@ -2938,7 +2936,7 @@ def is_wellformed(f):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if self.params.get('check_formats') is True:
|
if self.params.get('check_formats') is True:
|
||||||
formats = LazyList(self._check_formats(formats[::-1]), reverse=True)
|
formats = LazyList(self._check_formats(formats[::-1], warning=False), reverse=True)
|
||||||
|
|
||||||
if not formats or formats[0] is not info_dict:
|
if not formats or formats[0] is not info_dict:
|
||||||
# only set the 'formats' fields if the original info_dict list them
|
# only set the 'formats' fields if the original info_dict list them
|
||||||
@@ -3211,6 +3209,7 @@ def dl(self, name, info, subtitle=False, test=False):
|
|||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
params = self.params
|
params = self.params
|
||||||
|
|
||||||
fd = get_suitable_downloader(info, params, to_stdout=(name == '-'))(self, params)
|
fd = get_suitable_downloader(info, params, to_stdout=(name == '-'))(self, params)
|
||||||
if not test:
|
if not test:
|
||||||
for ph in self._progress_hooks:
|
for ph in self._progress_hooks:
|
||||||
@@ -3313,28 +3312,6 @@ def check_max_downloads():
|
|||||||
elif _infojson_written is None:
|
elif _infojson_written is None:
|
||||||
return
|
return
|
||||||
|
|
||||||
# Note: Annotations are deprecated
|
|
||||||
annofn = None
|
|
||||||
if self.params.get('writeannotations', False):
|
|
||||||
annofn = self.prepare_filename(info_dict, 'annotation')
|
|
||||||
if annofn:
|
|
||||||
if not self._ensure_dir_exists(annofn):
|
|
||||||
return
|
|
||||||
if not self.params.get('overwrites', True) and os.path.exists(annofn):
|
|
||||||
self.to_screen('[info] Video annotations are already present')
|
|
||||||
elif not info_dict.get('annotations'):
|
|
||||||
self.report_warning('There are no annotations to write.')
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
self.to_screen('[info] Writing video annotations to: ' + annofn)
|
|
||||||
with open(annofn, 'w', encoding='utf-8') as annofile:
|
|
||||||
annofile.write(info_dict['annotations'])
|
|
||||||
except (KeyError, TypeError):
|
|
||||||
self.report_warning('There are no annotations to write.')
|
|
||||||
except OSError:
|
|
||||||
self.report_error('Cannot write annotations file: ' + annofn)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Write internet shortcut files
|
# Write internet shortcut files
|
||||||
def _write_link_file(link_type):
|
def _write_link_file(link_type):
|
||||||
url = try_get(info_dict['webpage_url'], iri_to_uri)
|
url = try_get(info_dict['webpage_url'], iri_to_uri)
|
||||||
@@ -3686,6 +3663,8 @@ def filter_fn(obj):
|
|||||||
return {k: filter_fn(v) for k, v in obj.items() if not reject(k, v)}
|
return {k: filter_fn(v) for k, v in obj.items() if not reject(k, v)}
|
||||||
elif isinstance(obj, (list, tuple, set, LazyList)):
|
elif isinstance(obj, (list, tuple, set, LazyList)):
|
||||||
return list(map(filter_fn, obj))
|
return list(map(filter_fn, obj))
|
||||||
|
elif isinstance(obj, ImpersonateTarget):
|
||||||
|
return str(obj)
|
||||||
elif obj is None or isinstance(obj, (str, int, float, bool)):
|
elif obj is None or isinstance(obj, (str, int, float, bool)):
|
||||||
return obj
|
return obj
|
||||||
else:
|
else:
|
||||||
@@ -3954,6 +3933,7 @@ def simplified_codec(f, field):
|
|||||||
self._format_out('UNSUPPORTED', self.Styles.BAD_FORMAT) if f.get('ext') in ('f4f', 'f4m') else None,
|
self._format_out('UNSUPPORTED', self.Styles.BAD_FORMAT) if f.get('ext') in ('f4f', 'f4m') else None,
|
||||||
(self._format_out('Maybe DRM', self.Styles.WARNING) if f.get('has_drm') == 'maybe'
|
(self._format_out('Maybe DRM', self.Styles.WARNING) if f.get('has_drm') == 'maybe'
|
||||||
else self._format_out('DRM', self.Styles.BAD_FORMAT) if f.get('has_drm') else None),
|
else self._format_out('DRM', self.Styles.BAD_FORMAT) if f.get('has_drm') else None),
|
||||||
|
self._format_out('Untested', self.Styles.WARNING) if f.get('__needs_testing') else None,
|
||||||
format_field(f, 'format_note'),
|
format_field(f, 'format_note'),
|
||||||
format_field(f, 'container', ignore=(None, f.get('ext'))),
|
format_field(f, 'container', ignore=(None, f.get('ext'))),
|
||||||
delim=', '), delim=' '),
|
delim=', '), delim=' '),
|
||||||
@@ -4015,8 +3995,7 @@ def get_encoding(stream):
|
|||||||
if os.environ.get('TERM', '').lower() == 'dumb':
|
if os.environ.get('TERM', '').lower() == 'dumb':
|
||||||
additional_info.append('dumb')
|
additional_info.append('dumb')
|
||||||
if not supports_terminal_sequences(stream):
|
if not supports_terminal_sequences(stream):
|
||||||
from .utils import WINDOWS_VT_MODE # Must be imported locally
|
additional_info.append('No VT' if WINDOWS_VT_MODE.value is False else 'No ANSI')
|
||||||
additional_info.append('No VT' if WINDOWS_VT_MODE is False else 'No ANSI')
|
|
||||||
if additional_info:
|
if additional_info:
|
||||||
ret = f'{ret} ({",".join(additional_info)})'
|
ret = f'{ret} ({",".join(additional_info)})'
|
||||||
return ret
|
return ret
|
||||||
@@ -4162,6 +4141,31 @@ def _impersonate_target_available(self, target):
|
|||||||
for rh in self._request_director.handlers.values()
|
for rh in self._request_director.handlers.values()
|
||||||
if isinstance(rh, ImpersonateRequestHandler))
|
if isinstance(rh, ImpersonateRequestHandler))
|
||||||
|
|
||||||
|
def _parse_impersonate_targets(self, impersonate):
|
||||||
|
if impersonate in (True, ''):
|
||||||
|
impersonate = ImpersonateTarget()
|
||||||
|
|
||||||
|
requested_targets = [
|
||||||
|
t if isinstance(t, ImpersonateTarget) else ImpersonateTarget.from_str(t)
|
||||||
|
for t in variadic(impersonate)
|
||||||
|
] if impersonate else []
|
||||||
|
|
||||||
|
available_target = next(filter(self._impersonate_target_available, requested_targets), None)
|
||||||
|
|
||||||
|
return available_target, requested_targets
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _unavailable_targets_message(requested_targets, note=None, is_error=False):
|
||||||
|
note = note or 'The extractor specified to use impersonation for this download'
|
||||||
|
specific_targets = ', '.join(filter(None, map(str, requested_targets)))
|
||||||
|
message = (
|
||||||
|
'no impersonate target is available' if not specific_targets
|
||||||
|
else f'none of these impersonate targets are available: {specific_targets}')
|
||||||
|
return (
|
||||||
|
f'{note}, but {message}. {"See" if is_error else "If you encounter errors, then see"}'
|
||||||
|
f' https://github.com/yt-dlp/yt-dlp#impersonation '
|
||||||
|
f'for information on installing the required dependencies')
|
||||||
|
|
||||||
def urlopen(self, req):
|
def urlopen(self, req):
|
||||||
""" Start an HTTP download """
|
""" Start an HTTP download """
|
||||||
if isinstance(req, str):
|
if isinstance(req, str):
|
||||||
|
|||||||
@@ -59,11 +59,9 @@
|
|||||||
render_table,
|
render_table,
|
||||||
setproctitle,
|
setproctitle,
|
||||||
shell_quote,
|
shell_quote,
|
||||||
traverse_obj,
|
|
||||||
variadic,
|
variadic,
|
||||||
write_string,
|
write_string,
|
||||||
)
|
)
|
||||||
from .utils.networking import std_headers
|
|
||||||
from .utils._utils import _UnsafeExtensionError
|
from .utils._utils import _UnsafeExtensionError
|
||||||
from .YoutubeDL import YoutubeDL
|
from .YoutubeDL import YoutubeDL
|
||||||
|
|
||||||
@@ -159,6 +157,12 @@ def set_default_compat(compat_name, opt_name, default=True, remove_compat=True):
|
|||||||
elif 'prefer-vp9-sort' in opts.compat_opts:
|
elif 'prefer-vp9-sort' in opts.compat_opts:
|
||||||
opts.format_sort.extend(FormatSorter._prefer_vp9_sort)
|
opts.format_sort.extend(FormatSorter._prefer_vp9_sort)
|
||||||
|
|
||||||
|
if 'mtime-by-default' in opts.compat_opts:
|
||||||
|
if opts.updatetime is None:
|
||||||
|
opts.updatetime = True
|
||||||
|
else:
|
||||||
|
_unused_compat_opt('mtime-by-default')
|
||||||
|
|
||||||
_video_multistreams_set = set_default_compat('multistreams', 'allow_multiple_video_streams', False, remove_compat=False)
|
_video_multistreams_set = set_default_compat('multistreams', 'allow_multiple_video_streams', False, remove_compat=False)
|
||||||
_audio_multistreams_set = set_default_compat('multistreams', 'allow_multiple_audio_streams', False, remove_compat=False)
|
_audio_multistreams_set = set_default_compat('multistreams', 'allow_multiple_audio_streams', False, remove_compat=False)
|
||||||
if _video_multistreams_set is False and _audio_multistreams_set is False:
|
if _video_multistreams_set is False and _audio_multistreams_set is False:
|
||||||
@@ -494,6 +498,14 @@ def metadataparser_actions(f):
|
|||||||
'To let yt-dlp download and merge the best available formats, simply do not pass any format selection',
|
'To let yt-dlp download and merge the best available formats, simply do not pass any format selection',
|
||||||
'If you know what you are doing and want only the best pre-merged format, use "-f b" instead to suppress this warning')))
|
'If you know what you are doing and want only the best pre-merged format, use "-f b" instead to suppress this warning')))
|
||||||
|
|
||||||
|
# Common mistake: -f mp4
|
||||||
|
if opts.format == 'mp4':
|
||||||
|
warnings.append('.\n '.join((
|
||||||
|
'"-f mp4" selects the best pre-merged mp4 format which is often not what\'s intended',
|
||||||
|
'Pre-merged mp4 formats are not available from all sites, or may only be available in lower quality',
|
||||||
|
'To prioritize the best h264 video and aac audio in an mp4 container, use "-t mp4" instead',
|
||||||
|
'If you know what you are doing and want a pre-merged mp4 format, use "-f b[ext=mp4]" instead to suppress this warning')))
|
||||||
|
|
||||||
# --(postprocessor/downloader)-args without name
|
# --(postprocessor/downloader)-args without name
|
||||||
def report_args_compat(name, value, key1, key2=None, where=None):
|
def report_args_compat(name, value, key1, key2=None, where=None):
|
||||||
if key1 in value and key2 not in value:
|
if key1 in value and key2 not in value:
|
||||||
@@ -509,7 +521,6 @@ def report_args_compat(name, value, key1, key2=None, where=None):
|
|||||||
|
|
||||||
if report_args_compat('post-processor', opts.postprocessor_args, 'default-compat', 'default'):
|
if report_args_compat('post-processor', opts.postprocessor_args, 'default-compat', 'default'):
|
||||||
opts.postprocessor_args['default'] = opts.postprocessor_args.pop('default-compat')
|
opts.postprocessor_args['default'] = opts.postprocessor_args.pop('default-compat')
|
||||||
opts.postprocessor_args.setdefault('sponskrub', [])
|
|
||||||
|
|
||||||
def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_unplayable_formats',
|
def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_unplayable_formats',
|
||||||
val1=NO_DEFAULT, val2=NO_DEFAULT, default=False):
|
val1=NO_DEFAULT, val2=NO_DEFAULT, default=False):
|
||||||
@@ -534,11 +545,6 @@ def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_u
|
|||||||
'"--exec before_dl:"', 'exec_cmd', val2=opts.exec_cmd.get('before_dl'))
|
'"--exec before_dl:"', 'exec_cmd', val2=opts.exec_cmd.get('before_dl'))
|
||||||
report_conflict('--id', 'useid', '--output', 'outtmpl', val2=opts.outtmpl.get('default'))
|
report_conflict('--id', 'useid', '--output', 'outtmpl', val2=opts.outtmpl.get('default'))
|
||||||
report_conflict('--remux-video', 'remuxvideo', '--recode-video', 'recodevideo')
|
report_conflict('--remux-video', 'remuxvideo', '--recode-video', 'recodevideo')
|
||||||
report_conflict('--sponskrub', 'sponskrub', '--remove-chapters', 'remove_chapters')
|
|
||||||
report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-mark', 'sponsorblock_mark')
|
|
||||||
report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-remove', 'sponsorblock_remove')
|
|
||||||
report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters',
|
|
||||||
val1=opts.sponskrub and opts.sponskrub_cut)
|
|
||||||
|
|
||||||
# Conflicts with --allow-unplayable-formats
|
# Conflicts with --allow-unplayable-formats
|
||||||
report_conflict('--embed-metadata', 'addmetadata')
|
report_conflict('--embed-metadata', 'addmetadata')
|
||||||
@@ -551,23 +557,15 @@ def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_u
|
|||||||
report_conflict('--recode-video', 'recodevideo')
|
report_conflict('--recode-video', 'recodevideo')
|
||||||
report_conflict('--remove-chapters', 'remove_chapters', default=[])
|
report_conflict('--remove-chapters', 'remove_chapters', default=[])
|
||||||
report_conflict('--remux-video', 'remuxvideo')
|
report_conflict('--remux-video', 'remuxvideo')
|
||||||
report_conflict('--sponskrub', 'sponskrub')
|
|
||||||
report_conflict('--sponsorblock-remove', 'sponsorblock_remove', default=set())
|
report_conflict('--sponsorblock-remove', 'sponsorblock_remove', default=set())
|
||||||
report_conflict('--xattrs', 'xattrs')
|
report_conflict('--xattrs', 'xattrs')
|
||||||
|
|
||||||
# Fully deprecated options
|
if hasattr(opts, '_deprecated_options'):
|
||||||
def report_deprecation(val, old, new=None):
|
|
||||||
if not val:
|
|
||||||
return
|
|
||||||
deprecation_warnings.append(
|
deprecation_warnings.append(
|
||||||
f'{old} is deprecated and may be removed in a future version. Use {new} instead' if new
|
f'The following options have been deprecated: {", ".join(opts._deprecated_options)}\n'
|
||||||
else f'{old} is deprecated and may not work as expected')
|
'Please remove them from your command/configuration to avoid future errors.\n'
|
||||||
|
'See https://github.com/yt-dlp/yt-dlp/issues/14198 for more details')
|
||||||
report_deprecation(opts.sponskrub, '--sponskrub', '--sponsorblock-mark or --sponsorblock-remove')
|
del opts._deprecated_options
|
||||||
report_deprecation(not opts.prefer_ffmpeg, '--prefer-avconv', 'ffmpeg')
|
|
||||||
# report_deprecation(opts.include_ads, '--include-ads') # We may re-implement this in future
|
|
||||||
# report_deprecation(opts.call_home, '--call-home') # We may re-implement this in future
|
|
||||||
# report_deprecation(opts.writeannotations, '--write-annotations') # It's just that no website has it
|
|
||||||
|
|
||||||
# Dependent options
|
# Dependent options
|
||||||
opts.date = DateRange.day(opts.date) if opts.date else DateRange(opts.dateafter, opts.datebefore)
|
opts.date = DateRange.day(opts.date) if opts.date else DateRange(opts.dateafter, opts.datebefore)
|
||||||
@@ -698,21 +696,6 @@ def get_postprocessors(opts):
|
|||||||
'add_metadata': opts.addmetadata,
|
'add_metadata': opts.addmetadata,
|
||||||
'add_infojson': opts.embed_infojson,
|
'add_infojson': opts.embed_infojson,
|
||||||
}
|
}
|
||||||
# Deprecated
|
|
||||||
# This should be above EmbedThumbnail since sponskrub removes the thumbnail attachment
|
|
||||||
# but must be below EmbedSubtitle and FFmpegMetadata
|
|
||||||
# See https://github.com/yt-dlp/yt-dlp/issues/204 , https://github.com/faissaloo/SponSkrub/issues/29
|
|
||||||
# If opts.sponskrub is None, sponskrub is used, but it silently fails if the executable can't be found
|
|
||||||
if opts.sponskrub is not False:
|
|
||||||
yield {
|
|
||||||
'key': 'SponSkrub',
|
|
||||||
'path': opts.sponskrub_path,
|
|
||||||
'args': opts.sponskrub_args,
|
|
||||||
'cut': opts.sponskrub_cut,
|
|
||||||
'force': opts.sponskrub_force,
|
|
||||||
'ignoreerror': opts.sponskrub is None,
|
|
||||||
'_from_cli': True,
|
|
||||||
}
|
|
||||||
if opts.embedthumbnail:
|
if opts.embedthumbnail:
|
||||||
yield {
|
yield {
|
||||||
'key': 'EmbedThumbnail',
|
'key': 'EmbedThumbnail',
|
||||||
@@ -871,7 +854,6 @@ def parse_options(argv=None):
|
|||||||
'nopart': opts.nopart,
|
'nopart': opts.nopart,
|
||||||
'updatetime': opts.updatetime,
|
'updatetime': opts.updatetime,
|
||||||
'writedescription': opts.writedescription,
|
'writedescription': opts.writedescription,
|
||||||
'writeannotations': opts.writeannotations,
|
|
||||||
'writeinfojson': opts.writeinfojson,
|
'writeinfojson': opts.writeinfojson,
|
||||||
'allow_playlist_files': opts.allow_playlist_files,
|
'allow_playlist_files': opts.allow_playlist_files,
|
||||||
'clean_infojson': opts.clean_infojson,
|
'clean_infojson': opts.clean_infojson,
|
||||||
@@ -905,7 +887,6 @@ def parse_options(argv=None):
|
|||||||
'max_views': opts.max_views,
|
'max_views': opts.max_views,
|
||||||
'daterange': opts.date,
|
'daterange': opts.date,
|
||||||
'cachedir': opts.cachedir,
|
'cachedir': opts.cachedir,
|
||||||
'youtube_print_sig_code': opts.youtube_print_sig_code,
|
|
||||||
'age_limit': opts.age_limit,
|
'age_limit': opts.age_limit,
|
||||||
'download_archive': opts.download_archive,
|
'download_archive': opts.download_archive,
|
||||||
'break_on_existing': opts.break_on_existing,
|
'break_on_existing': opts.break_on_existing,
|
||||||
@@ -923,13 +904,9 @@ def parse_options(argv=None):
|
|||||||
'socket_timeout': opts.socket_timeout,
|
'socket_timeout': opts.socket_timeout,
|
||||||
'bidi_workaround': opts.bidi_workaround,
|
'bidi_workaround': opts.bidi_workaround,
|
||||||
'debug_printtraffic': opts.debug_printtraffic,
|
'debug_printtraffic': opts.debug_printtraffic,
|
||||||
'prefer_ffmpeg': opts.prefer_ffmpeg,
|
|
||||||
'include_ads': opts.include_ads,
|
|
||||||
'default_search': opts.default_search,
|
'default_search': opts.default_search,
|
||||||
'dynamic_mpd': opts.dynamic_mpd,
|
'dynamic_mpd': opts.dynamic_mpd,
|
||||||
'extractor_args': opts.extractor_args,
|
'extractor_args': opts.extractor_args,
|
||||||
'youtube_include_dash_manifest': opts.youtube_include_dash_manifest,
|
|
||||||
'youtube_include_hls_manifest': opts.youtube_include_hls_manifest,
|
|
||||||
'encoding': opts.encoding,
|
'encoding': opts.encoding,
|
||||||
'extract_flat': opts.extract_flat,
|
'extract_flat': opts.extract_flat,
|
||||||
'live_from_start': opts.live_from_start,
|
'live_from_start': opts.live_from_start,
|
||||||
@@ -941,7 +918,6 @@ def parse_options(argv=None):
|
|||||||
'fixup': opts.fixup,
|
'fixup': opts.fixup,
|
||||||
'source_address': opts.source_address,
|
'source_address': opts.source_address,
|
||||||
'impersonate': opts.impersonate,
|
'impersonate': opts.impersonate,
|
||||||
'call_home': opts.call_home,
|
|
||||||
'sleep_interval_requests': opts.sleep_interval_requests,
|
'sleep_interval_requests': opts.sleep_interval_requests,
|
||||||
'sleep_interval': opts.sleep_interval,
|
'sleep_interval': opts.sleep_interval,
|
||||||
'max_sleep_interval': opts.max_sleep_interval,
|
'max_sleep_interval': opts.max_sleep_interval,
|
||||||
@@ -951,7 +927,6 @@ def parse_options(argv=None):
|
|||||||
'force_keyframes_at_cuts': opts.force_keyframes_at_cuts,
|
'force_keyframes_at_cuts': opts.force_keyframes_at_cuts,
|
||||||
'list_thumbnails': opts.list_thumbnails,
|
'list_thumbnails': opts.list_thumbnails,
|
||||||
'playlist_items': opts.playlist_items,
|
'playlist_items': opts.playlist_items,
|
||||||
'xattr_set_filesize': opts.xattr_set_filesize,
|
|
||||||
'match_filter': opts.match_filter,
|
'match_filter': opts.match_filter,
|
||||||
'color': opts.color,
|
'color': opts.color,
|
||||||
'ffmpeg_location': opts.ffmpeg_location,
|
'ffmpeg_location': opts.ffmpeg_location,
|
||||||
@@ -960,11 +935,12 @@ def parse_options(argv=None):
|
|||||||
'hls_split_discontinuity': opts.hls_split_discontinuity,
|
'hls_split_discontinuity': opts.hls_split_discontinuity,
|
||||||
'external_downloader_args': opts.external_downloader_args,
|
'external_downloader_args': opts.external_downloader_args,
|
||||||
'postprocessor_args': opts.postprocessor_args,
|
'postprocessor_args': opts.postprocessor_args,
|
||||||
'cn_verification_proxy': opts.cn_verification_proxy,
|
|
||||||
'geo_verification_proxy': opts.geo_verification_proxy,
|
'geo_verification_proxy': opts.geo_verification_proxy,
|
||||||
'geo_bypass': opts.geo_bypass,
|
'geo_bypass': opts.geo_bypass,
|
||||||
'geo_bypass_country': opts.geo_bypass_country,
|
'geo_bypass_country': opts.geo_bypass_country,
|
||||||
'geo_bypass_ip_block': opts.geo_bypass_ip_block,
|
'geo_bypass_ip_block': opts.geo_bypass_ip_block,
|
||||||
|
'useid': opts.useid or None,
|
||||||
|
'warn_when_outdated': opts.update_self is None,
|
||||||
'_warnings': warnings,
|
'_warnings': warnings,
|
||||||
'_deprecation_warnings': deprecation_warnings,
|
'_deprecation_warnings': deprecation_warnings,
|
||||||
'compat_opts': opts.compat_opts,
|
'compat_opts': opts.compat_opts,
|
||||||
@@ -976,12 +952,6 @@ def _real_main(argv=None):
|
|||||||
|
|
||||||
parser, opts, all_urls, ydl_opts = parse_options(argv)
|
parser, opts, all_urls, ydl_opts = parse_options(argv)
|
||||||
|
|
||||||
# Dump user agent
|
|
||||||
if opts.dump_user_agent:
|
|
||||||
ua = traverse_obj(opts.headers, 'User-Agent', casesense=False, default=std_headers['User-Agent'])
|
|
||||||
write_string(f'{ua}\n', out=sys.stdout)
|
|
||||||
return
|
|
||||||
|
|
||||||
if print_extractor_information(opts, all_urls):
|
if print_extractor_information(opts, all_urls):
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -1024,6 +994,7 @@ def _real_main(argv=None):
|
|||||||
(ImpersonateTarget('safari'), 'curl_cffi'),
|
(ImpersonateTarget('safari'), 'curl_cffi'),
|
||||||
(ImpersonateTarget('firefox'), 'curl_cffi>=0.10'),
|
(ImpersonateTarget('firefox'), 'curl_cffi>=0.10'),
|
||||||
(ImpersonateTarget('edge'), 'curl_cffi'),
|
(ImpersonateTarget('edge'), 'curl_cffi'),
|
||||||
|
(ImpersonateTarget('tor'), 'curl_cffi>=0.11'),
|
||||||
]
|
]
|
||||||
|
|
||||||
available_targets = ydl._get_available_impersonate_targets()
|
available_targets = ydl._get_available_impersonate_targets()
|
||||||
|
|||||||
@@ -435,7 +435,7 @@ def sub_bytes_inv(data):
|
|||||||
|
|
||||||
|
|
||||||
def rotate(data):
|
def rotate(data):
|
||||||
return data[1:] + [data[0]]
|
return [*data[1:], data[0]]
|
||||||
|
|
||||||
|
|
||||||
def key_schedule_core(data, rcon_iteration):
|
def key_schedule_core(data, rcon_iteration):
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import datetime as dt
|
||||||
import os
|
import os
|
||||||
import xml.etree.ElementTree as etree
|
import xml.etree.ElementTree as etree
|
||||||
|
|
||||||
@@ -27,6 +28,13 @@ def compat_ord(c):
|
|||||||
return c if isinstance(c, int) else ord(c)
|
return c if isinstance(c, int) else ord(c)
|
||||||
|
|
||||||
|
|
||||||
|
def compat_datetime_from_timestamp(timestamp):
|
||||||
|
# Calling dt.datetime.fromtimestamp with negative timestamps throws error in Windows
|
||||||
|
# Ref: https://github.com/yt-dlp/yt-dlp/issues/5185, https://github.com/python/cpython/issues/81708,
|
||||||
|
# https://github.com/yt-dlp/yt-dlp/issues/6706#issuecomment-1496842642
|
||||||
|
return (dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp))
|
||||||
|
|
||||||
|
|
||||||
# Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl
|
# Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl
|
||||||
# See https://github.com/yt-dlp/yt-dlp/issues/792
|
# See https://github.com/yt-dlp/yt-dlp/issues/792
|
||||||
# https://docs.python.org/3/library/os.path.html#os.path.expanduser
|
# https://docs.python.org/3/library/os.path.html#os.path.expanduser
|
||||||
|
|||||||
@@ -37,7 +37,7 @@
|
|||||||
from ..dependencies.Cryptodome import AES as compat_pycrypto_AES # noqa: F401
|
from ..dependencies.Cryptodome import AES as compat_pycrypto_AES # noqa: F401
|
||||||
from ..networking.exceptions import HTTPError as compat_HTTPError
|
from ..networking.exceptions import HTTPError as compat_HTTPError
|
||||||
|
|
||||||
passthrough_module(__name__, '...utils', ('WINDOWS_VT_MODE', 'windows_enable_vt_mode'))
|
passthrough_module(__name__, '...utils', ('windows_enable_vt_mode',))
|
||||||
|
|
||||||
|
|
||||||
# compat_ctypes_WINFUNCTYPE = ctypes.WINFUNCTYPE
|
# compat_ctypes_WINFUNCTYPE = ctypes.WINFUNCTYPE
|
||||||
|
|||||||
@@ -125,6 +125,8 @@ def extract_cookies_from_browser(browser_name, profile=None, logger=YDLLogger(),
|
|||||||
|
|
||||||
|
|
||||||
def _extract_firefox_cookies(profile, container, logger):
|
def _extract_firefox_cookies(profile, container, logger):
|
||||||
|
MAX_SUPPORTED_DB_SCHEMA_VERSION = 16
|
||||||
|
|
||||||
logger.info('Extracting cookies from firefox')
|
logger.info('Extracting cookies from firefox')
|
||||||
if not sqlite3:
|
if not sqlite3:
|
||||||
logger.warning('Cannot extract cookies from firefox without sqlite3 support. '
|
logger.warning('Cannot extract cookies from firefox without sqlite3 support. '
|
||||||
@@ -159,9 +161,11 @@ def _extract_firefox_cookies(profile, container, logger):
|
|||||||
raise ValueError(f'could not find firefox container "{container}" in containers.json')
|
raise ValueError(f'could not find firefox container "{container}" in containers.json')
|
||||||
|
|
||||||
with tempfile.TemporaryDirectory(prefix='yt_dlp') as tmpdir:
|
with tempfile.TemporaryDirectory(prefix='yt_dlp') as tmpdir:
|
||||||
cursor = None
|
cursor = _open_database_copy(cookie_database_path, tmpdir)
|
||||||
try:
|
with contextlib.closing(cursor.connection):
|
||||||
cursor = _open_database_copy(cookie_database_path, tmpdir)
|
db_schema_version = cursor.execute('PRAGMA user_version;').fetchone()[0]
|
||||||
|
if db_schema_version > MAX_SUPPORTED_DB_SCHEMA_VERSION:
|
||||||
|
logger.warning(f'Possibly unsupported firefox cookies database version: {db_schema_version}')
|
||||||
if isinstance(container_id, int):
|
if isinstance(container_id, int):
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f'Only loading cookies from firefox container "{container}", ID {container_id}')
|
f'Only loading cookies from firefox container "{container}", ID {container_id}')
|
||||||
@@ -180,6 +184,10 @@ def _extract_firefox_cookies(profile, container, logger):
|
|||||||
total_cookie_count = len(table)
|
total_cookie_count = len(table)
|
||||||
for i, (host, name, value, path, expiry, is_secure) in enumerate(table):
|
for i, (host, name, value, path, expiry, is_secure) in enumerate(table):
|
||||||
progress_bar.print(f'Loading cookie {i: 6d}/{total_cookie_count: 6d}')
|
progress_bar.print(f'Loading cookie {i: 6d}/{total_cookie_count: 6d}')
|
||||||
|
# FF142 upgraded cookies DB to schema version 16 and started using milliseconds for cookie expiry
|
||||||
|
# Ref: https://github.com/mozilla-firefox/firefox/commit/5869af852cd20425165837f6c2d9971f3efba83d
|
||||||
|
if db_schema_version >= 16 and expiry is not None:
|
||||||
|
expiry /= 1000
|
||||||
cookie = http.cookiejar.Cookie(
|
cookie = http.cookiejar.Cookie(
|
||||||
version=0, name=name, value=value, port=None, port_specified=False,
|
version=0, name=name, value=value, port=None, port_specified=False,
|
||||||
domain=host, domain_specified=bool(host), domain_initial_dot=host.startswith('.'),
|
domain=host, domain_specified=bool(host), domain_initial_dot=host.startswith('.'),
|
||||||
@@ -188,9 +196,6 @@ def _extract_firefox_cookies(profile, container, logger):
|
|||||||
jar.set_cookie(cookie)
|
jar.set_cookie(cookie)
|
||||||
logger.info(f'Extracted {len(jar)} cookies from firefox')
|
logger.info(f'Extracted {len(jar)} cookies from firefox')
|
||||||
return jar
|
return jar
|
||||||
finally:
|
|
||||||
if cursor is not None:
|
|
||||||
cursor.connection.close()
|
|
||||||
|
|
||||||
|
|
||||||
def _firefox_browser_dirs():
|
def _firefox_browser_dirs():
|
||||||
@@ -764,11 +769,11 @@ def _get_linux_desktop_environment(env, logger):
|
|||||||
GetDesktopEnvironment
|
GetDesktopEnvironment
|
||||||
"""
|
"""
|
||||||
xdg_current_desktop = env.get('XDG_CURRENT_DESKTOP', None)
|
xdg_current_desktop = env.get('XDG_CURRENT_DESKTOP', None)
|
||||||
desktop_session = env.get('DESKTOP_SESSION', None)
|
desktop_session = env.get('DESKTOP_SESSION', '')
|
||||||
if xdg_current_desktop is not None:
|
if xdg_current_desktop is not None:
|
||||||
for part in map(str.strip, xdg_current_desktop.split(':')):
|
for part in map(str.strip, xdg_current_desktop.split(':')):
|
||||||
if part == 'Unity':
|
if part == 'Unity':
|
||||||
if desktop_session is not None and 'gnome-fallback' in desktop_session:
|
if 'gnome-fallback' in desktop_session:
|
||||||
return _LinuxDesktopEnvironment.GNOME
|
return _LinuxDesktopEnvironment.GNOME
|
||||||
else:
|
else:
|
||||||
return _LinuxDesktopEnvironment.UNITY
|
return _LinuxDesktopEnvironment.UNITY
|
||||||
@@ -797,35 +802,34 @@ def _get_linux_desktop_environment(env, logger):
|
|||||||
return _LinuxDesktopEnvironment.UKUI
|
return _LinuxDesktopEnvironment.UKUI
|
||||||
elif part == 'LXQt':
|
elif part == 'LXQt':
|
||||||
return _LinuxDesktopEnvironment.LXQT
|
return _LinuxDesktopEnvironment.LXQT
|
||||||
logger.info(f'XDG_CURRENT_DESKTOP is set to an unknown value: "{xdg_current_desktop}"')
|
logger.debug(f'XDG_CURRENT_DESKTOP is set to an unknown value: "{xdg_current_desktop}"')
|
||||||
|
|
||||||
elif desktop_session is not None:
|
if desktop_session == 'deepin':
|
||||||
if desktop_session == 'deepin':
|
return _LinuxDesktopEnvironment.DEEPIN
|
||||||
return _LinuxDesktopEnvironment.DEEPIN
|
elif desktop_session in ('mate', 'gnome'):
|
||||||
elif desktop_session in ('mate', 'gnome'):
|
return _LinuxDesktopEnvironment.GNOME
|
||||||
return _LinuxDesktopEnvironment.GNOME
|
elif desktop_session in ('kde4', 'kde-plasma'):
|
||||||
elif desktop_session in ('kde4', 'kde-plasma'):
|
return _LinuxDesktopEnvironment.KDE4
|
||||||
|
elif desktop_session == 'kde':
|
||||||
|
if 'KDE_SESSION_VERSION' in env:
|
||||||
return _LinuxDesktopEnvironment.KDE4
|
return _LinuxDesktopEnvironment.KDE4
|
||||||
elif desktop_session == 'kde':
|
|
||||||
if 'KDE_SESSION_VERSION' in env:
|
|
||||||
return _LinuxDesktopEnvironment.KDE4
|
|
||||||
else:
|
|
||||||
return _LinuxDesktopEnvironment.KDE3
|
|
||||||
elif 'xfce' in desktop_session or desktop_session == 'xubuntu':
|
|
||||||
return _LinuxDesktopEnvironment.XFCE
|
|
||||||
elif desktop_session == 'ukui':
|
|
||||||
return _LinuxDesktopEnvironment.UKUI
|
|
||||||
else:
|
else:
|
||||||
logger.info(f'DESKTOP_SESSION is set to an unknown value: "{desktop_session}"')
|
return _LinuxDesktopEnvironment.KDE3
|
||||||
|
elif 'xfce' in desktop_session or desktop_session == 'xubuntu':
|
||||||
|
return _LinuxDesktopEnvironment.XFCE
|
||||||
|
elif desktop_session == 'ukui':
|
||||||
|
return _LinuxDesktopEnvironment.UKUI
|
||||||
else:
|
else:
|
||||||
if 'GNOME_DESKTOP_SESSION_ID' in env:
|
logger.debug(f'DESKTOP_SESSION is set to an unknown value: "{desktop_session}"')
|
||||||
return _LinuxDesktopEnvironment.GNOME
|
|
||||||
elif 'KDE_FULL_SESSION' in env:
|
if 'GNOME_DESKTOP_SESSION_ID' in env:
|
||||||
if 'KDE_SESSION_VERSION' in env:
|
return _LinuxDesktopEnvironment.GNOME
|
||||||
return _LinuxDesktopEnvironment.KDE4
|
elif 'KDE_FULL_SESSION' in env:
|
||||||
else:
|
if 'KDE_SESSION_VERSION' in env:
|
||||||
return _LinuxDesktopEnvironment.KDE3
|
return _LinuxDesktopEnvironment.KDE4
|
||||||
|
else:
|
||||||
|
return _LinuxDesktopEnvironment.KDE3
|
||||||
|
|
||||||
return _LinuxDesktopEnvironment.OTHER
|
return _LinuxDesktopEnvironment.OTHER
|
||||||
|
|
||||||
|
|
||||||
@@ -1336,7 +1340,7 @@ def prepare_line(line):
|
|||||||
if len(cookie_list) != self._ENTRY_LEN:
|
if len(cookie_list) != self._ENTRY_LEN:
|
||||||
raise http.cookiejar.LoadError(f'invalid length {len(cookie_list)}')
|
raise http.cookiejar.LoadError(f'invalid length {len(cookie_list)}')
|
||||||
cookie = self._CookieFileEntry(*cookie_list)
|
cookie = self._CookieFileEntry(*cookie_list)
|
||||||
if cookie.expires_at and not cookie.expires_at.isdigit():
|
if cookie.expires_at and not re.fullmatch(r'[0-9]+(?:\.[0-9]+)?', cookie.expires_at):
|
||||||
raise http.cookiejar.LoadError(f'invalid expires at {cookie.expires_at}')
|
raise http.cookiejar.LoadError(f'invalid expires at {cookie.expires_at}')
|
||||||
return line
|
return line
|
||||||
|
|
||||||
|
|||||||
@@ -99,7 +99,7 @@ def _get_suitable_downloader(info_dict, protocol, params, default):
|
|||||||
if external_downloader is None:
|
if external_downloader is None:
|
||||||
if info_dict['to_stdout'] and FFmpegFD.can_merge_formats(info_dict, params):
|
if info_dict['to_stdout'] and FFmpegFD.can_merge_formats(info_dict, params):
|
||||||
return FFmpegFD
|
return FFmpegFD
|
||||||
elif external_downloader.lower() != 'native':
|
elif external_downloader.lower() != 'native' and info_dict.get('impersonate') is None:
|
||||||
ed = get_external_downloader(external_downloader)
|
ed = get_external_downloader(external_downloader)
|
||||||
if ed.can_download(info_dict, external_downloader):
|
if ed.can_download(info_dict, external_downloader):
|
||||||
return ed
|
return ed
|
||||||
|
|||||||
@@ -62,7 +62,6 @@ class FileDownloader:
|
|||||||
test: Download only first bytes to test the downloader.
|
test: Download only first bytes to test the downloader.
|
||||||
min_filesize: Skip files smaller than this size
|
min_filesize: Skip files smaller than this size
|
||||||
max_filesize: Skip files larger than this size
|
max_filesize: Skip files larger than this size
|
||||||
xattr_set_filesize: Set ytdl.filesize user xattribute with expected size.
|
|
||||||
progress_delta: The minimum time between progress output, in seconds
|
progress_delta: The minimum time between progress output, in seconds
|
||||||
external_downloader_args: A dictionary of downloader keys (in lower case)
|
external_downloader_args: A dictionary of downloader keys (in lower case)
|
||||||
and a list of additional command-line arguments for the
|
and a list of additional command-line arguments for the
|
||||||
@@ -455,14 +454,26 @@ def download(self, filename, info_dict, subtitle=False):
|
|||||||
self._finish_multiline_status()
|
self._finish_multiline_status()
|
||||||
return True, False
|
return True, False
|
||||||
|
|
||||||
|
sleep_note = ''
|
||||||
if subtitle:
|
if subtitle:
|
||||||
sleep_interval = self.params.get('sleep_interval_subtitles') or 0
|
sleep_interval = self.params.get('sleep_interval_subtitles') or 0
|
||||||
else:
|
else:
|
||||||
min_sleep_interval = self.params.get('sleep_interval') or 0
|
min_sleep_interval = self.params.get('sleep_interval') or 0
|
||||||
|
max_sleep_interval = self.params.get('max_sleep_interval') or 0
|
||||||
|
|
||||||
|
if available_at := info_dict.get('available_at'):
|
||||||
|
forced_sleep_interval = available_at - int(time.time())
|
||||||
|
if forced_sleep_interval > min_sleep_interval:
|
||||||
|
sleep_note = 'as required by the site'
|
||||||
|
min_sleep_interval = forced_sleep_interval
|
||||||
|
if forced_sleep_interval > max_sleep_interval:
|
||||||
|
max_sleep_interval = forced_sleep_interval
|
||||||
|
|
||||||
sleep_interval = random.uniform(
|
sleep_interval = random.uniform(
|
||||||
min_sleep_interval, self.params.get('max_sleep_interval') or min_sleep_interval)
|
min_sleep_interval, max_sleep_interval or min_sleep_interval)
|
||||||
|
|
||||||
if sleep_interval > 0:
|
if sleep_interval > 0:
|
||||||
self.to_screen(f'[download] Sleeping {sleep_interval:.2f} seconds ...')
|
self.to_screen(f'[download] Sleeping {sleep_interval:.2f} seconds {sleep_note}...')
|
||||||
time.sleep(sleep_interval)
|
time.sleep(sleep_interval)
|
||||||
|
|
||||||
ret = self.real_download(filename, info_dict)
|
ret = self.real_download(filename, info_dict)
|
||||||
@@ -495,3 +506,14 @@ def _debug_cmd(self, args, exe=None):
|
|||||||
exe = os.path.basename(args[0])
|
exe = os.path.basename(args[0])
|
||||||
|
|
||||||
self.write_debug(f'{exe} command line: {shell_quote(args)}')
|
self.write_debug(f'{exe} command line: {shell_quote(args)}')
|
||||||
|
|
||||||
|
def _get_impersonate_target(self, info_dict):
|
||||||
|
impersonate = info_dict.get('impersonate')
|
||||||
|
if impersonate is None:
|
||||||
|
return None
|
||||||
|
available_target, requested_targets = self.ydl._parse_impersonate_targets(impersonate)
|
||||||
|
if available_target:
|
||||||
|
return available_target
|
||||||
|
elif requested_targets:
|
||||||
|
self.report_warning(self.ydl._unavailable_targets_message(requested_targets))
|
||||||
|
return None
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
from . import get_suitable_downloader
|
from . import get_suitable_downloader
|
||||||
from .fragment import FragmentFD
|
from .fragment import FragmentFD
|
||||||
from ..utils import update_url_query, urljoin
|
from ..utils import ReExtractInfo, update_url_query, urljoin
|
||||||
|
|
||||||
|
|
||||||
class DashSegmentsFD(FragmentFD):
|
class DashSegmentsFD(FragmentFD):
|
||||||
@@ -28,6 +28,11 @@ def real_download(self, filename, info_dict):
|
|||||||
requested_formats = [{**info_dict, **fmt} for fmt in info_dict.get('requested_formats', [])]
|
requested_formats = [{**info_dict, **fmt} for fmt in info_dict.get('requested_formats', [])]
|
||||||
args = []
|
args = []
|
||||||
for fmt in requested_formats or [info_dict]:
|
for fmt in requested_formats or [info_dict]:
|
||||||
|
# Re-extract if --load-info-json is used and 'fragments' was originally a generator
|
||||||
|
# See https://github.com/yt-dlp/yt-dlp/issues/13906
|
||||||
|
if isinstance(fmt['fragments'], str):
|
||||||
|
raise ReExtractInfo('the stream needs to be re-extracted', expected=True)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
fragment_count = 1 if self.params.get('test') else len(fmt['fragments'])
|
fragment_count = 1 if self.params.get('test') else len(fmt['fragments'])
|
||||||
except TypeError:
|
except TypeError:
|
||||||
|
|||||||
@@ -563,7 +563,7 @@ def _call_downloader(self, tmpfilename, info_dict):
|
|||||||
f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
|
f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
|
||||||
for cookie in cookies)])
|
for cookie in cookies)])
|
||||||
if fmt.get('http_headers') and is_http:
|
if fmt.get('http_headers') and is_http:
|
||||||
# Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
|
# Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg:
|
||||||
# [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
|
# [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
|
||||||
args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])
|
args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])
|
||||||
|
|
||||||
@@ -572,7 +572,21 @@ def _call_downloader(self, tmpfilename, info_dict):
|
|||||||
if end_time:
|
if end_time:
|
||||||
args += ['-t', str(end_time - start_time)]
|
args += ['-t', str(end_time - start_time)]
|
||||||
|
|
||||||
args += [*self._configuration_args((f'_i{i + 1}', '_i')), '-i', fmt['url']]
|
url = fmt['url']
|
||||||
|
if self.params.get('enable_file_urls') and url.startswith('file:'):
|
||||||
|
# The default protocol_whitelist is 'file,crypto,data' when reading local m3u8 URLs,
|
||||||
|
# so only local segments can be read unless we also include 'http,https,tcp,tls'
|
||||||
|
args += ['-protocol_whitelist', 'file,crypto,data,http,https,tcp,tls']
|
||||||
|
# ffmpeg incorrectly handles 'file:' URLs by only removing the
|
||||||
|
# 'file:' prefix and treating the rest as if it's a normal filepath.
|
||||||
|
# FFmpegPostProcessor also depends on this behavior, so we need to fixup the URLs:
|
||||||
|
# - On Windows/Cygwin, replace 'file:///' and 'file://localhost/' with 'file:'
|
||||||
|
# - On *nix, replace 'file://localhost/' with 'file:/'
|
||||||
|
# Ref: https://github.com/yt-dlp/yt-dlp/issues/13781
|
||||||
|
# https://trac.ffmpeg.org/ticket/2702
|
||||||
|
url = re.sub(r'^file://(?:localhost)?/', 'file:' if os.name == 'nt' else 'file:/', url)
|
||||||
|
|
||||||
|
args += [*self._configuration_args((f'_i{i + 1}', '_i')), '-i', url]
|
||||||
|
|
||||||
if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):
|
if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):
|
||||||
args += ['-c', 'copy']
|
args += ['-c', 'copy']
|
||||||
@@ -640,10 +654,6 @@ def _call_downloader(self, tmpfilename, info_dict):
|
|||||||
return retval
|
return retval
|
||||||
|
|
||||||
|
|
||||||
class AVconvFD(FFmpegFD):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
_BY_NAME = {
|
_BY_NAME = {
|
||||||
klass.get_basename(): klass
|
klass.get_basename(): klass
|
||||||
for name, klass in globals().items()
|
for name, klass in globals().items()
|
||||||
|
|||||||
@@ -149,14 +149,14 @@ def read_abst(self):
|
|||||||
segments_count = self.read_unsigned_char()
|
segments_count = self.read_unsigned_char()
|
||||||
segments = []
|
segments = []
|
||||||
for _ in range(segments_count):
|
for _ in range(segments_count):
|
||||||
box_size, box_type, box_data = self.read_box_info()
|
_box_size, box_type, box_data = self.read_box_info()
|
||||||
assert box_type == b'asrt'
|
assert box_type == b'asrt'
|
||||||
segment = FlvReader(box_data).read_asrt()
|
segment = FlvReader(box_data).read_asrt()
|
||||||
segments.append(segment)
|
segments.append(segment)
|
||||||
fragments_run_count = self.read_unsigned_char()
|
fragments_run_count = self.read_unsigned_char()
|
||||||
fragments = []
|
fragments = []
|
||||||
for _ in range(fragments_run_count):
|
for _ in range(fragments_run_count):
|
||||||
box_size, box_type, box_data = self.read_box_info()
|
_box_size, box_type, box_data = self.read_box_info()
|
||||||
assert box_type == b'afrt'
|
assert box_type == b'afrt'
|
||||||
fragments.append(FlvReader(box_data).read_afrt())
|
fragments.append(FlvReader(box_data).read_afrt())
|
||||||
|
|
||||||
@@ -167,7 +167,7 @@ def read_abst(self):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def read_bootstrap_info(self):
|
def read_bootstrap_info(self):
|
||||||
total_size, box_type, box_data = self.read_box_info()
|
_, box_type, box_data = self.read_box_info()
|
||||||
assert box_type == b'abst'
|
assert box_type == b'abst'
|
||||||
return FlvReader(box_data).read_abst()
|
return FlvReader(box_data).read_abst()
|
||||||
|
|
||||||
@@ -324,9 +324,9 @@ def real_download(self, filename, info_dict):
|
|||||||
if requested_bitrate is None or len(formats) == 1:
|
if requested_bitrate is None or len(formats) == 1:
|
||||||
# get the best format
|
# get the best format
|
||||||
formats = sorted(formats, key=lambda f: f[0])
|
formats = sorted(formats, key=lambda f: f[0])
|
||||||
rate, media = formats[-1]
|
_, media = formats[-1]
|
||||||
else:
|
else:
|
||||||
rate, media = next(filter(
|
_, media = next(filter(
|
||||||
lambda f: int(f[0]) == requested_bitrate, formats))
|
lambda f: int(f[0]) == requested_bitrate, formats))
|
||||||
|
|
||||||
# Prefer baseURL for relative URLs as per 11.2 of F4M 3.0 spec.
|
# Prefer baseURL for relative URLs as per 11.2 of F4M 3.0 spec.
|
||||||
|
|||||||
@@ -302,7 +302,7 @@ def _finish_frag_download(self, ctx, info_dict):
|
|||||||
elif to_file:
|
elif to_file:
|
||||||
self.try_rename(ctx['tmpfilename'], ctx['filename'])
|
self.try_rename(ctx['tmpfilename'], ctx['filename'])
|
||||||
filetime = ctx.get('fragment_filetime')
|
filetime = ctx.get('fragment_filetime')
|
||||||
if self.params.get('updatetime', True) and filetime:
|
if self.params.get('updatetime') and filetime:
|
||||||
with contextlib.suppress(Exception):
|
with contextlib.suppress(Exception):
|
||||||
os.utime(ctx['filename'], (time.time(), filetime))
|
os.utime(ctx['filename'], (time.time(), filetime))
|
||||||
|
|
||||||
|
|||||||
@@ -94,12 +94,19 @@ def real_download(self, filename, info_dict):
|
|||||||
can_download, message = self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')), None
|
can_download, message = self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')), None
|
||||||
if can_download:
|
if can_download:
|
||||||
has_ffmpeg = FFmpegFD.available()
|
has_ffmpeg = FFmpegFD.available()
|
||||||
no_crypto = not Cryptodome.AES and '#EXT-X-KEY:METHOD=AES-128' in s
|
if not Cryptodome.AES and '#EXT-X-KEY:METHOD=AES-128' in s:
|
||||||
if no_crypto and has_ffmpeg:
|
# Even if pycryptodomex isn't available, force HlsFD for m3u8s that won't work with ffmpeg
|
||||||
can_download, message = False, 'The stream has AES-128 encryption and pycryptodomex is not available'
|
ffmpeg_can_dl = not traverse_obj(info_dict, ((
|
||||||
elif no_crypto:
|
'extra_param_to_segment_url', 'extra_param_to_key_url',
|
||||||
message = ('The stream has AES-128 encryption and neither ffmpeg nor pycryptodomex are available; '
|
'hls_media_playlist_data', ('hls_aes', ('uri', 'key', 'iv')),
|
||||||
'Decryption will be performed natively, but will be extremely slow')
|
), any))
|
||||||
|
message = 'The stream has AES-128 encryption and {} available'.format(
|
||||||
|
'neither ffmpeg nor pycryptodomex are' if ffmpeg_can_dl and not has_ffmpeg else
|
||||||
|
'pycryptodomex is not')
|
||||||
|
if has_ffmpeg and ffmpeg_can_dl:
|
||||||
|
can_download = False
|
||||||
|
else:
|
||||||
|
message += '; decryption will be performed natively, but will be extremely slow'
|
||||||
elif info_dict.get('extractor_key') == 'Generic' and re.search(r'(?m)#EXT-X-MEDIA-SEQUENCE:(?!0$)', s):
|
elif info_dict.get('extractor_key') == 'Generic' and re.search(r'(?m)#EXT-X-MEDIA-SEQUENCE:(?!0$)', s):
|
||||||
install_ffmpeg = '' if has_ffmpeg else 'install ffmpeg and '
|
install_ffmpeg = '' if has_ffmpeg else 'install ffmpeg and '
|
||||||
message = ('Live HLS streams are not supported by the native downloader. If this is a livestream, '
|
message = ('Live HLS streams are not supported by the native downloader. If this is a livestream, '
|
||||||
@@ -198,7 +205,7 @@ def is_ad_fragment_end(s):
|
|||||||
line = line.strip()
|
line = line.strip()
|
||||||
if line:
|
if line:
|
||||||
if not line.startswith('#'):
|
if not line.startswith('#'):
|
||||||
if format_index and discontinuity_count != format_index:
|
if format_index is not None and discontinuity_count != format_index:
|
||||||
continue
|
continue
|
||||||
if ad_frag_next:
|
if ad_frag_next:
|
||||||
continue
|
continue
|
||||||
@@ -224,7 +231,7 @@ def is_ad_fragment_end(s):
|
|||||||
byte_range = {}
|
byte_range = {}
|
||||||
|
|
||||||
elif line.startswith('#EXT-X-MAP'):
|
elif line.startswith('#EXT-X-MAP'):
|
||||||
if format_index and discontinuity_count != format_index:
|
if format_index is not None and discontinuity_count != format_index:
|
||||||
continue
|
continue
|
||||||
if frag_index > 0:
|
if frag_index > 0:
|
||||||
self.report_error(
|
self.report_error(
|
||||||
|
|||||||
@@ -13,12 +13,9 @@
|
|||||||
ContentTooShortError,
|
ContentTooShortError,
|
||||||
RetryManager,
|
RetryManager,
|
||||||
ThrottledDownload,
|
ThrottledDownload,
|
||||||
XAttrMetadataError,
|
|
||||||
XAttrUnavailableError,
|
|
||||||
int_or_none,
|
int_or_none,
|
||||||
parse_http_range,
|
parse_http_range,
|
||||||
try_call,
|
try_call,
|
||||||
write_xattr,
|
|
||||||
)
|
)
|
||||||
from ..utils.networking import HTTPHeaderDict
|
from ..utils.networking import HTTPHeaderDict
|
||||||
|
|
||||||
@@ -27,6 +24,10 @@ class HttpFD(FileDownloader):
|
|||||||
def real_download(self, filename, info_dict):
|
def real_download(self, filename, info_dict):
|
||||||
url = info_dict['url']
|
url = info_dict['url']
|
||||||
request_data = info_dict.get('request_data', None)
|
request_data = info_dict.get('request_data', None)
|
||||||
|
request_extensions = {}
|
||||||
|
impersonate_target = self._get_impersonate_target(info_dict)
|
||||||
|
if impersonate_target is not None:
|
||||||
|
request_extensions['impersonate'] = impersonate_target
|
||||||
|
|
||||||
class DownloadContext(dict):
|
class DownloadContext(dict):
|
||||||
__getattr__ = dict.get
|
__getattr__ = dict.get
|
||||||
@@ -109,7 +110,7 @@ def establish_connection():
|
|||||||
if try_call(lambda: range_end >= ctx.content_len):
|
if try_call(lambda: range_end >= ctx.content_len):
|
||||||
range_end = ctx.content_len - 1
|
range_end = ctx.content_len - 1
|
||||||
|
|
||||||
request = Request(url, request_data, headers)
|
request = Request(url, request_data, headers, extensions=request_extensions)
|
||||||
has_range = range_start is not None
|
has_range = range_start is not None
|
||||||
if has_range:
|
if has_range:
|
||||||
request.headers['Range'] = f'bytes={int(range_start)}-{int_or_none(range_end) or ""}'
|
request.headers['Range'] = f'bytes={int(range_start)}-{int_or_none(range_end) or ""}'
|
||||||
@@ -269,12 +270,6 @@ def retry(e):
|
|||||||
self.report_error(f'unable to open for writing: {err}')
|
self.report_error(f'unable to open for writing: {err}')
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if self.params.get('xattr_set_filesize', False) and data_len is not None:
|
|
||||||
try:
|
|
||||||
write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode())
|
|
||||||
except (XAttrUnavailableError, XAttrMetadataError) as err:
|
|
||||||
self.report_error(f'unable to set filesize xattr: {err}')
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ctx.stream.write(data_block)
|
ctx.stream.write(data_block)
|
||||||
except OSError as err:
|
except OSError as err:
|
||||||
@@ -348,7 +343,7 @@ def retry(e):
|
|||||||
self.try_rename(ctx.tmpfilename, ctx.filename)
|
self.try_rename(ctx.tmpfilename, ctx.filename)
|
||||||
|
|
||||||
# Update file modification time
|
# Update file modification time
|
||||||
if self.params.get('updatetime', True):
|
if self.params.get('updatetime'):
|
||||||
info_dict['filetime'] = self.try_utime(ctx.filename, ctx.data.headers.get('last-modified', None))
|
info_dict['filetime'] = self.try_utime(ctx.filename, ctx.data.headers.get('last-modified', None))
|
||||||
|
|
||||||
self._hook_progress({
|
self._hook_progress({
|
||||||
|
|||||||
@@ -5,47 +5,46 @@
|
|||||||
from .common import FileDownloader
|
from .common import FileDownloader
|
||||||
from .external import FFmpegFD
|
from .external import FFmpegFD
|
||||||
from ..networking import Request
|
from ..networking import Request
|
||||||
from ..utils import DownloadError, str_or_none, try_get
|
from ..networking.websocket import WebSocketResponse
|
||||||
|
from ..utils import DownloadError, str_or_none, truncate_string
|
||||||
|
from ..utils.traversal import traverse_obj
|
||||||
|
|
||||||
|
|
||||||
class NiconicoLiveFD(FileDownloader):
|
class NiconicoLiveFD(FileDownloader):
|
||||||
""" Downloads niconico live without being stopped """
|
""" Downloads niconico live without being stopped """
|
||||||
|
|
||||||
def real_download(self, filename, info_dict):
|
def real_download(self, filename, info_dict):
|
||||||
video_id = info_dict['video_id']
|
video_id = info_dict['id']
|
||||||
ws_url = info_dict['url']
|
opts = info_dict['downloader_options']
|
||||||
ws_extractor = info_dict['ws']
|
quality, ws_extractor, ws_url = opts['max_quality'], opts['ws'], opts['ws_url']
|
||||||
ws_origin_host = info_dict['origin']
|
|
||||||
live_quality = info_dict.get('live_quality', 'high')
|
|
||||||
live_latency = info_dict.get('live_latency', 'high')
|
|
||||||
dl = FFmpegFD(self.ydl, self.params or {})
|
dl = FFmpegFD(self.ydl, self.params or {})
|
||||||
|
|
||||||
new_info_dict = info_dict.copy()
|
new_info_dict = info_dict.copy()
|
||||||
new_info_dict.update({
|
new_info_dict['protocol'] = 'm3u8'
|
||||||
'protocol': 'm3u8',
|
|
||||||
})
|
|
||||||
|
|
||||||
def communicate_ws(reconnect):
|
def communicate_ws(reconnect):
|
||||||
if reconnect:
|
# Support --load-info-json as if it is a reconnect attempt
|
||||||
ws = self.ydl.urlopen(Request(ws_url, headers={'Origin': f'https://{ws_origin_host}'}))
|
if reconnect or not isinstance(ws_extractor, WebSocketResponse):
|
||||||
|
ws = self.ydl.urlopen(Request(
|
||||||
|
ws_url, headers={'Origin': 'https://live.nicovideo.jp'}))
|
||||||
if self.ydl.params.get('verbose', False):
|
if self.ydl.params.get('verbose', False):
|
||||||
self.to_screen('[debug] Sending startWatching request')
|
self.write_debug('Sending startWatching request')
|
||||||
ws.send(json.dumps({
|
ws.send(json.dumps({
|
||||||
'type': 'startWatching',
|
|
||||||
'data': {
|
'data': {
|
||||||
|
'reconnect': True,
|
||||||
|
'room': {
|
||||||
|
'commentable': True,
|
||||||
|
'protocol': 'webSocket',
|
||||||
|
},
|
||||||
'stream': {
|
'stream': {
|
||||||
'quality': live_quality,
|
|
||||||
'protocol': 'hls+fmp4',
|
|
||||||
'latency': live_latency,
|
|
||||||
'accessRightMethod': 'single_cookie',
|
'accessRightMethod': 'single_cookie',
|
||||||
'chasePlay': False,
|
'chasePlay': False,
|
||||||
|
'latency': 'high',
|
||||||
|
'protocol': 'hls',
|
||||||
|
'quality': quality,
|
||||||
},
|
},
|
||||||
'room': {
|
|
||||||
'protocol': 'webSocket',
|
|
||||||
'commentable': True,
|
|
||||||
},
|
|
||||||
'reconnect': True,
|
|
||||||
},
|
},
|
||||||
|
'type': 'startWatching',
|
||||||
}))
|
}))
|
||||||
else:
|
else:
|
||||||
ws = ws_extractor
|
ws = ws_extractor
|
||||||
@@ -58,7 +57,6 @@ def communicate_ws(reconnect):
|
|||||||
if not data or not isinstance(data, dict):
|
if not data or not isinstance(data, dict):
|
||||||
continue
|
continue
|
||||||
if data.get('type') == 'ping':
|
if data.get('type') == 'ping':
|
||||||
# pong back
|
|
||||||
ws.send(r'{"type":"pong"}')
|
ws.send(r'{"type":"pong"}')
|
||||||
ws.send(r'{"type":"keepSeat"}')
|
ws.send(r'{"type":"keepSeat"}')
|
||||||
elif data.get('type') == 'disconnect':
|
elif data.get('type') == 'disconnect':
|
||||||
@@ -66,12 +64,10 @@ def communicate_ws(reconnect):
|
|||||||
return True
|
return True
|
||||||
elif data.get('type') == 'error':
|
elif data.get('type') == 'error':
|
||||||
self.write_debug(data)
|
self.write_debug(data)
|
||||||
message = try_get(data, lambda x: x['body']['code'], str) or recv
|
message = traverse_obj(data, ('body', 'code', {str_or_none}), default=recv)
|
||||||
return DownloadError(message)
|
return DownloadError(message)
|
||||||
elif self.ydl.params.get('verbose', False):
|
elif self.ydl.params.get('verbose', False):
|
||||||
if len(recv) > 100:
|
self.write_debug(f'Server response: {truncate_string(recv, 100)}')
|
||||||
recv = recv[:100] + '...'
|
|
||||||
self.to_screen(f'[debug] Server said: {recv}')
|
|
||||||
|
|
||||||
def ws_main():
|
def ws_main():
|
||||||
reconnect = False
|
reconnect = False
|
||||||
@@ -81,7 +77,8 @@ def ws_main():
|
|||||||
if ret is True:
|
if ret is True:
|
||||||
return
|
return
|
||||||
except BaseException as e:
|
except BaseException as e:
|
||||||
self.to_screen('[{}] {}: Connection error occured, reconnecting after 10 seconds: {}'.format('niconico:live', video_id, str_or_none(e)))
|
self.to_screen(
|
||||||
|
f'[niconico:live] {video_id}: Connection error occured, reconnecting after 10 seconds: {e}')
|
||||||
time.sleep(10)
|
time.sleep(10)
|
||||||
continue
|
continue
|
||||||
finally:
|
finally:
|
||||||
|
|||||||
@@ -58,13 +58,7 @@
|
|||||||
ADNSeasonIE,
|
ADNSeasonIE,
|
||||||
)
|
)
|
||||||
from .adobeconnect import AdobeConnectIE
|
from .adobeconnect import AdobeConnectIE
|
||||||
from .adobetv import (
|
from .adobetv import AdobeTVVideoIE
|
||||||
AdobeTVChannelIE,
|
|
||||||
AdobeTVEmbedIE,
|
|
||||||
AdobeTVIE,
|
|
||||||
AdobeTVShowIE,
|
|
||||||
AdobeTVVideoIE,
|
|
||||||
)
|
|
||||||
from .adultswim import AdultSwimIE
|
from .adultswim import AdultSwimIE
|
||||||
from .aenetworks import (
|
from .aenetworks import (
|
||||||
AENetworksCollectionIE,
|
AENetworksCollectionIE,
|
||||||
@@ -152,7 +146,6 @@
|
|||||||
ARDBetaMediathekIE,
|
ARDBetaMediathekIE,
|
||||||
ARDMediathekCollectionIE,
|
ARDMediathekCollectionIE,
|
||||||
)
|
)
|
||||||
from .arkena import ArkenaIE
|
|
||||||
from .arnes import ArnesIE
|
from .arnes import ArnesIE
|
||||||
from .art19 import (
|
from .art19 import (
|
||||||
Art19IE,
|
Art19IE,
|
||||||
@@ -201,7 +194,6 @@
|
|||||||
BanByeChannelIE,
|
BanByeChannelIE,
|
||||||
BanByeIE,
|
BanByeIE,
|
||||||
)
|
)
|
||||||
from .bandaichannel import BandaiChannelIE
|
|
||||||
from .bandcamp import (
|
from .bandcamp import (
|
||||||
BandcampAlbumIE,
|
BandcampAlbumIE,
|
||||||
BandcampIE,
|
BandcampIE,
|
||||||
@@ -229,7 +221,6 @@
|
|||||||
from .beatport import BeatportIE
|
from .beatport import BeatportIE
|
||||||
from .beeg import BeegIE
|
from .beeg import BeegIE
|
||||||
from .behindkink import BehindKinkIE
|
from .behindkink import BehindKinkIE
|
||||||
from .bellmedia import BellMediaIE
|
|
||||||
from .berufetv import BerufeTVIE
|
from .berufetv import BerufeTVIE
|
||||||
from .bet import BetIE
|
from .bet import BetIE
|
||||||
from .bfi import BFIPlayerIE
|
from .bfi import BFIPlayerIE
|
||||||
@@ -275,7 +266,10 @@
|
|||||||
BitChuteChannelIE,
|
BitChuteChannelIE,
|
||||||
BitChuteIE,
|
BitChuteIE,
|
||||||
)
|
)
|
||||||
from .blackboardcollaborate import BlackboardCollaborateIE
|
from .blackboardcollaborate import (
|
||||||
|
BlackboardCollaborateIE,
|
||||||
|
BlackboardCollaborateLaunchIE,
|
||||||
|
)
|
||||||
from .bleacherreport import (
|
from .bleacherreport import (
|
||||||
BleacherReportCMSIE,
|
BleacherReportCMSIE,
|
||||||
BleacherReportIE,
|
BleacherReportIE,
|
||||||
@@ -300,7 +294,6 @@
|
|||||||
BrainPOPIlIE,
|
BrainPOPIlIE,
|
||||||
BrainPOPJrIE,
|
BrainPOPJrIE,
|
||||||
)
|
)
|
||||||
from .bravotv import BravoTVIE
|
|
||||||
from .breitbart import BreitBartIE
|
from .breitbart import BreitBartIE
|
||||||
from .brightcove import (
|
from .brightcove import (
|
||||||
BrightcoveLegacyIE,
|
BrightcoveLegacyIE,
|
||||||
@@ -310,6 +303,7 @@
|
|||||||
BrilliantpalaClassesIE,
|
BrilliantpalaClassesIE,
|
||||||
BrilliantpalaElearnIE,
|
BrilliantpalaElearnIE,
|
||||||
)
|
)
|
||||||
|
from .btvplus import BTVPlusIE
|
||||||
from .bundesliga import BundesligaIE
|
from .bundesliga import BundesligaIE
|
||||||
from .bundestag import BundestagIE
|
from .bundestag import BundestagIE
|
||||||
from .bunnycdn import BunnyCdnIE
|
from .bunnycdn import BunnyCdnIE
|
||||||
@@ -338,7 +332,6 @@
|
|||||||
from .canalplus import CanalplusIE
|
from .canalplus import CanalplusIE
|
||||||
from .canalsurmas import CanalsurmasIE
|
from .canalsurmas import CanalsurmasIE
|
||||||
from .caracoltv import CaracolTvPlayIE
|
from .caracoltv import CaracolTvPlayIE
|
||||||
from .cartoonnetwork import CartoonNetworkIE
|
|
||||||
from .cbc import (
|
from .cbc import (
|
||||||
CBCIE,
|
CBCIE,
|
||||||
CBCGemIE,
|
CBCGemIE,
|
||||||
@@ -405,16 +398,12 @@
|
|||||||
from .cloudycdn import CloudyCDNIE
|
from .cloudycdn import CloudyCDNIE
|
||||||
from .clubic import ClubicIE
|
from .clubic import ClubicIE
|
||||||
from .clyp import ClypIE
|
from .clyp import ClypIE
|
||||||
from .cmt import CMTIE
|
|
||||||
from .cnbc import CNBCVideoIE
|
from .cnbc import CNBCVideoIE
|
||||||
from .cnn import (
|
from .cnn import (
|
||||||
CNNIE,
|
CNNIE,
|
||||||
CNNIndonesiaIE,
|
CNNIndonesiaIE,
|
||||||
)
|
)
|
||||||
from .comedycentral import (
|
from .comedycentral import ComedyCentralIE
|
||||||
ComedyCentralIE,
|
|
||||||
ComedyCentralTVIE,
|
|
||||||
)
|
|
||||||
from .commonmistakes import (
|
from .commonmistakes import (
|
||||||
BlobIE,
|
BlobIE,
|
||||||
CommonMistakesIE,
|
CommonMistakesIE,
|
||||||
@@ -435,7 +424,6 @@
|
|||||||
CPACPlaylistIE,
|
CPACPlaylistIE,
|
||||||
)
|
)
|
||||||
from .cracked import CrackedIE
|
from .cracked import CrackedIE
|
||||||
from .crackle import CrackleIE
|
|
||||||
from .craftsy import CraftsyIE
|
from .craftsy import CraftsyIE
|
||||||
from .crooksandliars import CrooksAndLiarsIE
|
from .crooksandliars import CrooksAndLiarsIE
|
||||||
from .crowdbunker import (
|
from .crowdbunker import (
|
||||||
@@ -448,7 +436,6 @@
|
|||||||
CSpanIE,
|
CSpanIE,
|
||||||
)
|
)
|
||||||
from .ctsnews import CtsNewsIE
|
from .ctsnews import CtsNewsIE
|
||||||
from .ctv import CTVIE
|
|
||||||
from .ctvnews import CTVNewsIE
|
from .ctvnews import CTVNewsIE
|
||||||
from .cultureunplugged import CultureUnpluggedIE
|
from .cultureunplugged import CultureUnpluggedIE
|
||||||
from .curiositystream import (
|
from .curiositystream import (
|
||||||
@@ -456,10 +443,6 @@
|
|||||||
CuriosityStreamIE,
|
CuriosityStreamIE,
|
||||||
CuriosityStreamSeriesIE,
|
CuriosityStreamSeriesIE,
|
||||||
)
|
)
|
||||||
from .cwtv import (
|
|
||||||
CWTVIE,
|
|
||||||
CWTVMovieIE,
|
|
||||||
)
|
|
||||||
from .cybrary import (
|
from .cybrary import (
|
||||||
CybraryCourseIE,
|
CybraryCourseIE,
|
||||||
CybraryIE,
|
CybraryIE,
|
||||||
@@ -572,10 +555,6 @@
|
|||||||
DWIE,
|
DWIE,
|
||||||
DWArticleIE,
|
DWArticleIE,
|
||||||
)
|
)
|
||||||
from .eagleplatform import (
|
|
||||||
ClipYouEmbedIE,
|
|
||||||
EaglePlatformIE,
|
|
||||||
)
|
|
||||||
from .ebaumsworld import EbaumsWorldIE
|
from .ebaumsworld import EbaumsWorldIE
|
||||||
from .ebay import EbayIE
|
from .ebay import EbayIE
|
||||||
from .egghead import (
|
from .egghead import (
|
||||||
@@ -641,6 +620,10 @@
|
|||||||
FancodeVodIE,
|
FancodeVodIE,
|
||||||
)
|
)
|
||||||
from .fathom import FathomIE
|
from .fathom import FathomIE
|
||||||
|
from .faulio import (
|
||||||
|
FaulioIE,
|
||||||
|
FaulioLiveIE,
|
||||||
|
)
|
||||||
from .faz import FazIE
|
from .faz import FazIE
|
||||||
from .fc2 import (
|
from .fc2 import (
|
||||||
FC2IE,
|
FC2IE,
|
||||||
@@ -807,9 +790,7 @@
|
|||||||
from .hotnewhiphop import HotNewHipHopIE
|
from .hotnewhiphop import HotNewHipHopIE
|
||||||
from .hotstar import (
|
from .hotstar import (
|
||||||
HotStarIE,
|
HotStarIE,
|
||||||
HotStarPlaylistIE,
|
|
||||||
HotStarPrefixIE,
|
HotStarPrefixIE,
|
||||||
HotStarSeasonIE,
|
|
||||||
HotStarSeriesIE,
|
HotStarSeriesIE,
|
||||||
)
|
)
|
||||||
from .hrefli import HrefLiRedirectIE
|
from .hrefli import HrefLiRedirectIE
|
||||||
@@ -923,17 +904,15 @@
|
|||||||
ShugiinItvVodIE,
|
ShugiinItvVodIE,
|
||||||
)
|
)
|
||||||
from .jeuxvideo import JeuxVideoIE
|
from .jeuxvideo import JeuxVideoIE
|
||||||
from .jiocinema import (
|
|
||||||
JioCinemaIE,
|
|
||||||
JioCinemaSeriesIE,
|
|
||||||
)
|
|
||||||
from .jiosaavn import (
|
from .jiosaavn import (
|
||||||
JioSaavnAlbumIE,
|
JioSaavnAlbumIE,
|
||||||
|
JioSaavnArtistIE,
|
||||||
JioSaavnPlaylistIE,
|
JioSaavnPlaylistIE,
|
||||||
|
JioSaavnShowIE,
|
||||||
|
JioSaavnShowPlaylistIE,
|
||||||
JioSaavnSongIE,
|
JioSaavnSongIE,
|
||||||
)
|
)
|
||||||
from .joj import JojIE
|
from .joj import JojIE
|
||||||
from .joqrag import JoqrAgIE
|
|
||||||
from .jove import JoveIE
|
from .jove import JoveIE
|
||||||
from .jstream import JStreamIE
|
from .jstream import JStreamIE
|
||||||
from .jtbc import (
|
from .jtbc import (
|
||||||
@@ -1036,11 +1015,6 @@
|
|||||||
LikeeIE,
|
LikeeIE,
|
||||||
LikeeUserIE,
|
LikeeUserIE,
|
||||||
)
|
)
|
||||||
from .limelight import (
|
|
||||||
LimelightChannelIE,
|
|
||||||
LimelightChannelListIE,
|
|
||||||
LimelightMediaIE,
|
|
||||||
)
|
|
||||||
from .linkedin import (
|
from .linkedin import (
|
||||||
LinkedInEventsIE,
|
LinkedInEventsIE,
|
||||||
LinkedInIE,
|
LinkedInIE,
|
||||||
@@ -1106,6 +1080,7 @@
|
|||||||
from .massengeschmacktv import MassengeschmackTVIE
|
from .massengeschmacktv import MassengeschmackTVIE
|
||||||
from .masters import MastersIE
|
from .masters import MastersIE
|
||||||
from .matchtv import MatchTVIE
|
from .matchtv import MatchTVIE
|
||||||
|
from .mave import MaveIE
|
||||||
from .mbn import MBNIE
|
from .mbn import MBNIE
|
||||||
from .mdr import MDRIE
|
from .mdr import MDRIE
|
||||||
from .medaltv import MedalTVIE
|
from .medaltv import MedalTVIE
|
||||||
@@ -1151,6 +1126,7 @@
|
|||||||
MindsIE,
|
MindsIE,
|
||||||
)
|
)
|
||||||
from .minoto import MinotoIE
|
from .minoto import MinotoIE
|
||||||
|
from .mir24tv import Mir24TvIE
|
||||||
from .mirrativ import (
|
from .mirrativ import (
|
||||||
MirrativIE,
|
MirrativIE,
|
||||||
MirrativUserIE,
|
MirrativUserIE,
|
||||||
@@ -1160,7 +1136,6 @@
|
|||||||
OCWMITIE,
|
OCWMITIE,
|
||||||
TechTVMITIE,
|
TechTVMITIE,
|
||||||
)
|
)
|
||||||
from .mitele import MiTeleIE
|
|
||||||
from .mixch import (
|
from .mixch import (
|
||||||
MixchArchiveIE,
|
MixchArchiveIE,
|
||||||
MixchIE,
|
MixchIE,
|
||||||
@@ -1171,6 +1146,10 @@
|
|||||||
MixcloudPlaylistIE,
|
MixcloudPlaylistIE,
|
||||||
MixcloudUserIE,
|
MixcloudUserIE,
|
||||||
)
|
)
|
||||||
|
from .mixlr import (
|
||||||
|
MixlrIE,
|
||||||
|
MixlrRecoringIE,
|
||||||
|
)
|
||||||
from .mlb import (
|
from .mlb import (
|
||||||
MLBIE,
|
MLBIE,
|
||||||
MLBTVIE,
|
MLBTVIE,
|
||||||
@@ -1194,15 +1173,7 @@
|
|||||||
from .moviezine import MoviezineIE
|
from .moviezine import MoviezineIE
|
||||||
from .movingimage import MovingImageIE
|
from .movingimage import MovingImageIE
|
||||||
from .msn import MSNIE
|
from .msn import MSNIE
|
||||||
from .mtv import (
|
from .mtv import MTVIE
|
||||||
MTVDEIE,
|
|
||||||
MTVIE,
|
|
||||||
MTVItaliaIE,
|
|
||||||
MTVItaliaProgrammaIE,
|
|
||||||
MTVJapanIE,
|
|
||||||
MTVServicesEmbeddedIE,
|
|
||||||
MTVVideoIE,
|
|
||||||
)
|
|
||||||
from .muenchentv import MuenchenTVIE
|
from .muenchentv import MuenchenTVIE
|
||||||
from .murrtube import (
|
from .murrtube import (
|
||||||
MurrtubeIE,
|
MurrtubeIE,
|
||||||
@@ -1260,6 +1231,7 @@
|
|||||||
)
|
)
|
||||||
from .nbc import (
|
from .nbc import (
|
||||||
NBCIE,
|
NBCIE,
|
||||||
|
BravoTVIE,
|
||||||
NBCNewsIE,
|
NBCNewsIE,
|
||||||
NBCOlympicsIE,
|
NBCOlympicsIE,
|
||||||
NBCOlympicsStreamIE,
|
NBCOlympicsStreamIE,
|
||||||
@@ -1267,6 +1239,7 @@
|
|||||||
NBCSportsStreamIE,
|
NBCSportsStreamIE,
|
||||||
NBCSportsVPlayerIE,
|
NBCSportsVPlayerIE,
|
||||||
NBCStationsIE,
|
NBCStationsIE,
|
||||||
|
SyfyIE,
|
||||||
)
|
)
|
||||||
from .ndr import (
|
from .ndr import (
|
||||||
NDRIE,
|
NDRIE,
|
||||||
@@ -1342,12 +1315,7 @@
|
|||||||
NhkVodProgramIE,
|
NhkVodProgramIE,
|
||||||
)
|
)
|
||||||
from .nhl import NHLIE
|
from .nhl import NHLIE
|
||||||
from .nick import (
|
from .nick import NickIE
|
||||||
NickBrIE,
|
|
||||||
NickDeIE,
|
|
||||||
NickIE,
|
|
||||||
NickRuIE,
|
|
||||||
)
|
|
||||||
from .niconico import (
|
from .niconico import (
|
||||||
NiconicoHistoryIE,
|
NiconicoHistoryIE,
|
||||||
NiconicoIE,
|
NiconicoIE,
|
||||||
@@ -1379,7 +1347,6 @@
|
|||||||
from .noice import NoicePodcastIE
|
from .noice import NoicePodcastIE
|
||||||
from .nonktube import NonkTubeIE
|
from .nonktube import NonkTubeIE
|
||||||
from .noodlemagazine import NoodleMagazineIE
|
from .noodlemagazine import NoodleMagazineIE
|
||||||
from .noovo import NoovoIE
|
|
||||||
from .nosnl import NOSNLArticleIE
|
from .nosnl import NOSNLArticleIE
|
||||||
from .nova import (
|
from .nova import (
|
||||||
NovaEmbedIE,
|
NovaEmbedIE,
|
||||||
@@ -1460,6 +1427,7 @@
|
|||||||
OnetPlIE,
|
OnetPlIE,
|
||||||
)
|
)
|
||||||
from .onionstudios import OnionStudiosIE
|
from .onionstudios import OnionStudiosIE
|
||||||
|
from .onsen import OnsenIE
|
||||||
from .opencast import (
|
from .opencast import (
|
||||||
OpencastIE,
|
OpencastIE,
|
||||||
OpencastPlaylistIE,
|
OpencastPlaylistIE,
|
||||||
@@ -1493,10 +1461,6 @@
|
|||||||
PanoptoListIE,
|
PanoptoListIE,
|
||||||
PanoptoPlaylistIE,
|
PanoptoPlaylistIE,
|
||||||
)
|
)
|
||||||
from .paramountplus import (
|
|
||||||
ParamountPlusIE,
|
|
||||||
ParamountPlusSeriesIE,
|
|
||||||
)
|
|
||||||
from .parler import ParlerIE
|
from .parler import ParlerIE
|
||||||
from .parlview import ParlviewIE
|
from .parlview import ParlviewIE
|
||||||
from .parti import (
|
from .parti import (
|
||||||
@@ -1550,16 +1514,12 @@
|
|||||||
PiramideTVChannelIE,
|
PiramideTVChannelIE,
|
||||||
PiramideTVIE,
|
PiramideTVIE,
|
||||||
)
|
)
|
||||||
from .pixivsketch import (
|
|
||||||
PixivSketchIE,
|
|
||||||
PixivSketchUserIE,
|
|
||||||
)
|
|
||||||
from .pladform import PladformIE
|
|
||||||
from .planetmarathi import PlanetMarathiIE
|
from .planetmarathi import PlanetMarathiIE
|
||||||
from .platzi import (
|
from .platzi import (
|
||||||
PlatziCourseIE,
|
PlatziCourseIE,
|
||||||
PlatziIE,
|
PlatziIE,
|
||||||
)
|
)
|
||||||
|
from .playerfm import PlayerFmIE
|
||||||
from .playplustv import PlayPlusTVIE
|
from .playplustv import PlayPlusTVIE
|
||||||
from .playsuisse import PlaySuisseIE
|
from .playsuisse import PlaySuisseIE
|
||||||
from .playtvak import PlaytvakIE
|
from .playtvak import PlaytvakIE
|
||||||
@@ -1570,6 +1530,7 @@
|
|||||||
)
|
)
|
||||||
from .plutotv import PlutoTVIE
|
from .plutotv import PlutoTVIE
|
||||||
from .plvideo import PlVideoIE
|
from .plvideo import PlVideoIE
|
||||||
|
from .plyr import PlyrEmbedIE
|
||||||
from .podbayfm import (
|
from .podbayfm import (
|
||||||
PodbayFMChannelIE,
|
PodbayFMChannelIE,
|
||||||
PodbayFMIE,
|
PodbayFMIE,
|
||||||
@@ -1785,6 +1746,7 @@
|
|||||||
RTVEALaCartaIE,
|
RTVEALaCartaIE,
|
||||||
RTVEAudioIE,
|
RTVEAudioIE,
|
||||||
RTVELiveIE,
|
RTVELiveIE,
|
||||||
|
RTVEProgramIE,
|
||||||
RTVETelevisionIE,
|
RTVETelevisionIE,
|
||||||
)
|
)
|
||||||
from .rtvs import RTVSIE
|
from .rtvs import RTVSIE
|
||||||
@@ -1808,7 +1770,6 @@
|
|||||||
RutubePlaylistIE,
|
RutubePlaylistIE,
|
||||||
RutubeTagsIE,
|
RutubeTagsIE,
|
||||||
)
|
)
|
||||||
from .rutv import RUTVIE
|
|
||||||
from .ruutu import RuutuIE
|
from .ruutu import RuutuIE
|
||||||
from .ruv import (
|
from .ruv import (
|
||||||
RuvIE,
|
RuvIE,
|
||||||
@@ -1826,6 +1787,7 @@
|
|||||||
from .saitosan import SaitosanIE
|
from .saitosan import SaitosanIE
|
||||||
from .samplefocus import SampleFocusIE
|
from .samplefocus import SampleFocusIE
|
||||||
from .sapo import SapoIE
|
from .sapo import SapoIE
|
||||||
|
from .sauceplus import SaucePlusIE
|
||||||
from .sbs import SBSIE
|
from .sbs import SBSIE
|
||||||
from .sbscokr import (
|
from .sbscokr import (
|
||||||
SBSCoKrAllvodProgramIE,
|
SBSCoKrAllvodProgramIE,
|
||||||
@@ -1868,6 +1830,7 @@
|
|||||||
from .sharepoint import SharePointIE
|
from .sharepoint import SharePointIE
|
||||||
from .sharevideos import ShareVideosEmbedIE
|
from .sharevideos import ShareVideosEmbedIE
|
||||||
from .shemaroome import ShemarooMeIE
|
from .shemaroome import ShemarooMeIE
|
||||||
|
from .shiey import ShieyIE
|
||||||
from .showroomlive import ShowRoomLiveIE
|
from .showroomlive import ShowRoomLiveIE
|
||||||
from .sibnet import SibnetEmbedIE
|
from .sibnet import SibnetEmbedIE
|
||||||
from .simplecast import (
|
from .simplecast import (
|
||||||
@@ -1876,7 +1839,6 @@
|
|||||||
SimplecastPodcastIE,
|
SimplecastPodcastIE,
|
||||||
)
|
)
|
||||||
from .sina import SinaIE
|
from .sina import SinaIE
|
||||||
from .sixplay import SixPlayIE
|
|
||||||
from .skeb import SkebIE
|
from .skeb import SkebIE
|
||||||
from .sky import (
|
from .sky import (
|
||||||
SkyNewsIE,
|
SkyNewsIE,
|
||||||
@@ -1904,7 +1866,12 @@
|
|||||||
from .slideshare import SlideshareIE
|
from .slideshare import SlideshareIE
|
||||||
from .slideslive import SlidesLiveIE
|
from .slideslive import SlidesLiveIE
|
||||||
from .slutload import SlutloadIE
|
from .slutload import SlutloadIE
|
||||||
from .smotrim import SmotrimIE
|
from .smotrim import (
|
||||||
|
SmotrimAudioIE,
|
||||||
|
SmotrimIE,
|
||||||
|
SmotrimLiveIE,
|
||||||
|
SmotrimPlaylistIE,
|
||||||
|
)
|
||||||
from .snapchat import SnapchatSpotlightIE
|
from .snapchat import SnapchatSpotlightIE
|
||||||
from .snotr import SnotrIE
|
from .snotr import SnotrIE
|
||||||
from .softwhiteunderbelly import SoftWhiteUnderbellyIE
|
from .softwhiteunderbelly import SoftWhiteUnderbellyIE
|
||||||
@@ -1932,12 +1899,13 @@
|
|||||||
SoundgasmProfileIE,
|
SoundgasmProfileIE,
|
||||||
)
|
)
|
||||||
from .southpark import (
|
from .southpark import (
|
||||||
|
SouthParkComBrIE,
|
||||||
|
SouthParkCoUkIE,
|
||||||
SouthParkDeIE,
|
SouthParkDeIE,
|
||||||
SouthParkDkIE,
|
SouthParkDkIE,
|
||||||
SouthParkEsIE,
|
SouthParkEsIE,
|
||||||
SouthParkIE,
|
SouthParkIE,
|
||||||
SouthParkLatIE,
|
SouthParkLatIE,
|
||||||
SouthParkNlIE,
|
|
||||||
)
|
)
|
||||||
from .sovietscloset import (
|
from .sovietscloset import (
|
||||||
SovietsClosetIE,
|
SovietsClosetIE,
|
||||||
@@ -1948,23 +1916,14 @@
|
|||||||
SpankBangPlaylistIE,
|
SpankBangPlaylistIE,
|
||||||
)
|
)
|
||||||
from .spiegel import SpiegelIE
|
from .spiegel import SpiegelIE
|
||||||
from .spike import (
|
|
||||||
BellatorIE,
|
|
||||||
ParamountNetworkIE,
|
|
||||||
)
|
|
||||||
from .sport5 import Sport5IE
|
from .sport5 import Sport5IE
|
||||||
from .sportbox import SportBoxIE
|
from .sportbox import SportBoxIE
|
||||||
from .sportdeutschland import SportDeutschlandIE
|
from .sportdeutschland import SportDeutschlandIE
|
||||||
from .spotify import (
|
|
||||||
SpotifyIE,
|
|
||||||
SpotifyShowIE,
|
|
||||||
)
|
|
||||||
from .spreaker import (
|
from .spreaker import (
|
||||||
SpreakerIE,
|
SpreakerIE,
|
||||||
SpreakerShowIE,
|
SpreakerShowIE,
|
||||||
)
|
)
|
||||||
from .springboardplatform import SpringboardPlatformIE
|
from .springboardplatform import SpringboardPlatformIE
|
||||||
from .sprout import SproutIE
|
|
||||||
from .sproutvideo import (
|
from .sproutvideo import (
|
||||||
SproutVideoIE,
|
SproutVideoIE,
|
||||||
VidsIoIE,
|
VidsIoIE,
|
||||||
@@ -1986,6 +1945,7 @@
|
|||||||
from .startv import StarTVIE
|
from .startv import StarTVIE
|
||||||
from .steam import (
|
from .steam import (
|
||||||
SteamCommunityBroadcastIE,
|
SteamCommunityBroadcastIE,
|
||||||
|
SteamCommunityIE,
|
||||||
SteamIE,
|
SteamIE,
|
||||||
)
|
)
|
||||||
from .stitcher import (
|
from .stitcher import (
|
||||||
@@ -2015,13 +1975,11 @@
|
|||||||
SverigesRadioPublicationIE,
|
SverigesRadioPublicationIE,
|
||||||
)
|
)
|
||||||
from .svt import (
|
from .svt import (
|
||||||
SVTIE,
|
|
||||||
SVTPageIE,
|
SVTPageIE,
|
||||||
SVTPlayIE,
|
SVTPlayIE,
|
||||||
SVTSeriesIE,
|
SVTSeriesIE,
|
||||||
)
|
)
|
||||||
from .swearnet import SwearnetEpisodeIE
|
from .swearnet import SwearnetEpisodeIE
|
||||||
from .syfy import SyfyIE
|
|
||||||
from .syvdk import SYVDKIE
|
from .syvdk import SYVDKIE
|
||||||
from .sztvhu import SztvHuIE
|
from .sztvhu import SztvHuIE
|
||||||
from .tagesschau import TagesschauIE
|
from .tagesschau import TagesschauIE
|
||||||
@@ -2100,6 +2058,7 @@
|
|||||||
TheGuardianPodcastIE,
|
TheGuardianPodcastIE,
|
||||||
TheGuardianPodcastPlaylistIE,
|
TheGuardianPodcastPlaylistIE,
|
||||||
)
|
)
|
||||||
|
from .thehighwire import TheHighWireIE
|
||||||
from .theholetv import TheHoleTvIE
|
from .theholetv import TheHoleTvIE
|
||||||
from .theintercept import TheInterceptIE
|
from .theintercept import TheInterceptIE
|
||||||
from .theplatform import (
|
from .theplatform import (
|
||||||
@@ -2146,6 +2105,7 @@
|
|||||||
from .toggo import ToggoIE
|
from .toggo import ToggoIE
|
||||||
from .tonline import TOnlineIE
|
from .tonline import TOnlineIE
|
||||||
from .toongoggles import ToonGogglesIE
|
from .toongoggles import ToonGogglesIE
|
||||||
|
from .toutiao import ToutiaoIE
|
||||||
from .toutv import TouTvIE
|
from .toutv import TouTvIE
|
||||||
from .toypics import (
|
from .toypics import (
|
||||||
ToypicsIE,
|
ToypicsIE,
|
||||||
@@ -2168,7 +2128,6 @@
|
|||||||
from .trueid import TrueIDIE
|
from .trueid import TrueIDIE
|
||||||
from .trunews import TruNewsIE
|
from .trunews import TruNewsIE
|
||||||
from .truth import TruthIE
|
from .truth import TruthIE
|
||||||
from .trutv import TruTVIE
|
|
||||||
from .tube8 import Tube8IE
|
from .tube8 import Tube8IE
|
||||||
from .tubetugraz import (
|
from .tubetugraz import (
|
||||||
TubeTuGrazIE,
|
TubeTuGrazIE,
|
||||||
@@ -2180,6 +2139,7 @@
|
|||||||
)
|
)
|
||||||
from .tumblr import TumblrIE
|
from .tumblr import TumblrIE
|
||||||
from .tunein import (
|
from .tunein import (
|
||||||
|
TuneInEmbedIE,
|
||||||
TuneInPodcastEpisodeIE,
|
TuneInPodcastEpisodeIE,
|
||||||
TuneInPodcastIE,
|
TuneInPodcastIE,
|
||||||
TuneInShortenerIE,
|
TuneInShortenerIE,
|
||||||
@@ -2218,7 +2178,6 @@
|
|||||||
from .tver import TVerIE
|
from .tver import TVerIE
|
||||||
from .tvigle import TvigleIE
|
from .tvigle import TvigleIE
|
||||||
from .tviplayer import TVIPlayerIE
|
from .tviplayer import TVIPlayerIE
|
||||||
from .tvland import TVLandIE
|
|
||||||
from .tvn24 import TVN24IE
|
from .tvn24 import TVN24IE
|
||||||
from .tvnoe import TVNoeIE
|
from .tvnoe import TVNoeIE
|
||||||
from .tvopengr import (
|
from .tvopengr import (
|
||||||
@@ -2239,6 +2198,7 @@
|
|||||||
from .tvplayer import TVPlayerIE
|
from .tvplayer import TVPlayerIE
|
||||||
from .tvw import (
|
from .tvw import (
|
||||||
TvwIE,
|
TvwIE,
|
||||||
|
TvwNewsIE,
|
||||||
TvwTvChannelsIE,
|
TvwTvChannelsIE,
|
||||||
)
|
)
|
||||||
from .tweakers import TweakersIE
|
from .tweakers import TweakersIE
|
||||||
@@ -2287,6 +2247,7 @@
|
|||||||
)
|
)
|
||||||
from .umg import UMGDeIE
|
from .umg import UMGDeIE
|
||||||
from .unistra import UnistraIE
|
from .unistra import UnistraIE
|
||||||
|
from .unitednations import UnitedNationsWebTvIE
|
||||||
from .unity import UnityIE
|
from .unity import UnityIE
|
||||||
from .unsupported import (
|
from .unsupported import (
|
||||||
KnownDRMIE,
|
KnownDRMIE,
|
||||||
@@ -2313,7 +2274,6 @@
|
|||||||
from .varzesh3 import Varzesh3IE
|
from .varzesh3 import Varzesh3IE
|
||||||
from .vbox7 import Vbox7IE
|
from .vbox7 import Vbox7IE
|
||||||
from .veo import VeoIE
|
from .veo import VeoIE
|
||||||
from .vesti import VestiIE
|
|
||||||
from .vevo import (
|
from .vevo import (
|
||||||
VevoIE,
|
VevoIE,
|
||||||
VevoPlaylistIE,
|
VevoPlaylistIE,
|
||||||
@@ -2368,6 +2328,7 @@
|
|||||||
VHXEmbedIE,
|
VHXEmbedIE,
|
||||||
VimeoAlbumIE,
|
VimeoAlbumIE,
|
||||||
VimeoChannelIE,
|
VimeoChannelIE,
|
||||||
|
VimeoEventIE,
|
||||||
VimeoGroupsIE,
|
VimeoGroupsIE,
|
||||||
VimeoIE,
|
VimeoIE,
|
||||||
VimeoLikesIE,
|
VimeoLikesIE,
|
||||||
@@ -2501,7 +2462,6 @@
|
|||||||
WykopPostCommentIE,
|
WykopPostCommentIE,
|
||||||
WykopPostIE,
|
WykopPostIE,
|
||||||
)
|
)
|
||||||
from .xanimu import XanimuIE
|
|
||||||
from .xboxclips import XboxClipsIE
|
from .xboxclips import XboxClipsIE
|
||||||
from .xhamster import (
|
from .xhamster import (
|
||||||
XHamsterEmbedIE,
|
XHamsterEmbedIE,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
import uuid
|
||||||
import xml.etree.ElementTree as etree
|
import xml.etree.ElementTree as etree
|
||||||
|
|
||||||
from .common import InfoExtractor
|
from .common import InfoExtractor
|
||||||
@@ -10,6 +11,7 @@
|
|||||||
from ..utils import (
|
from ..utils import (
|
||||||
NO_DEFAULT,
|
NO_DEFAULT,
|
||||||
ExtractorError,
|
ExtractorError,
|
||||||
|
parse_qs,
|
||||||
unescapeHTML,
|
unescapeHTML,
|
||||||
unified_timestamp,
|
unified_timestamp,
|
||||||
urlencode_postdata,
|
urlencode_postdata,
|
||||||
@@ -45,6 +47,7 @@
|
|||||||
'name': 'Comcast XFINITY',
|
'name': 'Comcast XFINITY',
|
||||||
'username_field': 'user',
|
'username_field': 'user',
|
||||||
'password_field': 'passwd',
|
'password_field': 'passwd',
|
||||||
|
'login_hostname': 'login.xfinity.com',
|
||||||
},
|
},
|
||||||
'TWC': {
|
'TWC': {
|
||||||
'name': 'Time Warner Cable | Spectrum',
|
'name': 'Time Warner Cable | Spectrum',
|
||||||
@@ -74,6 +77,12 @@
|
|||||||
'name': 'Verizon FiOS',
|
'name': 'Verizon FiOS',
|
||||||
'username_field': 'IDToken1',
|
'username_field': 'IDToken1',
|
||||||
'password_field': 'IDToken2',
|
'password_field': 'IDToken2',
|
||||||
|
'login_hostname': 'ssoauth.verizon.com',
|
||||||
|
},
|
||||||
|
'Fubo': {
|
||||||
|
'name': 'Fubo',
|
||||||
|
'username_field': 'username',
|
||||||
|
'password_field': 'password',
|
||||||
},
|
},
|
||||||
'Cablevision': {
|
'Cablevision': {
|
||||||
'name': 'Optimum/Cablevision',
|
'name': 'Optimum/Cablevision',
|
||||||
@@ -1338,6 +1347,7 @@
|
|||||||
'name': 'Sling TV',
|
'name': 'Sling TV',
|
||||||
'username_field': 'username',
|
'username_field': 'username',
|
||||||
'password_field': 'password',
|
'password_field': 'password',
|
||||||
|
'login_hostname': 'identity.sling.com',
|
||||||
},
|
},
|
||||||
'Suddenlink': {
|
'Suddenlink': {
|
||||||
'name': 'Suddenlink',
|
'name': 'Suddenlink',
|
||||||
@@ -1355,7 +1365,6 @@
|
|||||||
class AdobePassIE(InfoExtractor): # XXX: Conventionally, base classes should end with BaseIE/InfoExtractor
|
class AdobePassIE(InfoExtractor): # XXX: Conventionally, base classes should end with BaseIE/InfoExtractor
|
||||||
_SERVICE_PROVIDER_TEMPLATE = 'https://sp.auth.adobe.com/adobe-services/%s'
|
_SERVICE_PROVIDER_TEMPLATE = 'https://sp.auth.adobe.com/adobe-services/%s'
|
||||||
_USER_AGENT = 'Mozilla/5.0 (X11; Linux i686; rv:47.0) Gecko/20100101 Firefox/47.0'
|
_USER_AGENT = 'Mozilla/5.0 (X11; Linux i686; rv:47.0) Gecko/20100101 Firefox/47.0'
|
||||||
_MODERN_USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; rv:131.0) Gecko/20100101 Firefox/131.0'
|
|
||||||
_MVPD_CACHE = 'ap-mvpd'
|
_MVPD_CACHE = 'ap-mvpd'
|
||||||
|
|
||||||
_DOWNLOADING_LOGIN_PAGE = 'Downloading Provider Login Page'
|
_DOWNLOADING_LOGIN_PAGE = 'Downloading Provider Login Page'
|
||||||
@@ -1367,6 +1376,11 @@ def _download_webpage_handle(self, *args, **kwargs):
|
|||||||
return super()._download_webpage_handle(
|
return super()._download_webpage_handle(
|
||||||
*args, **kwargs)
|
*args, **kwargs)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_mso_headers(mso_info):
|
||||||
|
# Not needed currently
|
||||||
|
return {}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _get_mvpd_resource(provider_id, title, guid, rating):
|
def _get_mvpd_resource(provider_id, title, guid, rating):
|
||||||
channel = etree.Element('channel')
|
channel = etree.Element('channel')
|
||||||
@@ -1382,7 +1396,13 @@ def _get_mvpd_resource(provider_id, title, guid, rating):
|
|||||||
resource_rating.text = rating
|
resource_rating.text = rating
|
||||||
return '<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">' + etree.tostring(channel).decode() + '</rss>'
|
return '<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">' + etree.tostring(channel).decode() + '</rss>'
|
||||||
|
|
||||||
def _extract_mvpd_auth(self, url, video_id, requestor_id, resource):
|
def _extract_mvpd_auth(self, url, video_id, requestor_id, resource, software_statement):
|
||||||
|
mso_id = self.get_param('ap_mso')
|
||||||
|
if mso_id:
|
||||||
|
mso_info = MSO_INFO[mso_id]
|
||||||
|
else:
|
||||||
|
mso_info = {}
|
||||||
|
|
||||||
def xml_text(xml_str, tag):
|
def xml_text(xml_str, tag):
|
||||||
return self._search_regex(
|
return self._search_regex(
|
||||||
f'<{tag}>(.+?)</{tag}>', xml_str, tag)
|
f'<{tag}>(.+?)</{tag}>', xml_str, tag)
|
||||||
@@ -1391,15 +1411,27 @@ def is_expired(token, date_ele):
|
|||||||
token_expires = unified_timestamp(re.sub(r'[_ ]GMT', '', xml_text(token, date_ele)))
|
token_expires = unified_timestamp(re.sub(r'[_ ]GMT', '', xml_text(token, date_ele)))
|
||||||
return token_expires and token_expires <= int(time.time())
|
return token_expires and token_expires <= int(time.time())
|
||||||
|
|
||||||
def post_form(form_page_res, note, data={}):
|
def post_form(form_page_res, note, data={}, validate_url=False):
|
||||||
form_page, urlh = form_page_res
|
form_page, urlh = form_page_res
|
||||||
post_url = self._html_search_regex(r'<form[^>]+action=(["\'])(?P<url>.+?)\1', form_page, 'post url', group='url')
|
post_url = self._html_search_regex(r'<form[^>]+action=(["\'])(?P<url>.+?)\1', form_page, 'post url', group='url')
|
||||||
if not re.match(r'https?://', post_url):
|
if not re.match(r'https?://', post_url):
|
||||||
post_url = urllib.parse.urljoin(urlh.url, post_url)
|
post_url = urllib.parse.urljoin(urlh.url, post_url)
|
||||||
|
if validate_url:
|
||||||
|
# This request is submitting credentials so we should validate it when possible
|
||||||
|
url_parsed = urllib.parse.urlparse(post_url)
|
||||||
|
expected_hostname = mso_info.get('login_hostname')
|
||||||
|
if expected_hostname and expected_hostname != url_parsed.hostname:
|
||||||
|
raise ExtractorError(
|
||||||
|
f'Unexpected login URL hostname; expected "{expected_hostname}" but got '
|
||||||
|
f'"{url_parsed.hostname}". Aborting before submitting credentials')
|
||||||
|
if url_parsed.scheme != 'https':
|
||||||
|
self.write_debug('Upgrading login URL scheme to https')
|
||||||
|
post_url = urllib.parse.urlunparse(url_parsed._replace(scheme='https'))
|
||||||
form_data = self._hidden_inputs(form_page)
|
form_data = self._hidden_inputs(form_page)
|
||||||
form_data.update(data)
|
form_data.update(data)
|
||||||
return self._download_webpage_handle(
|
return self._download_webpage_handle(
|
||||||
post_url, video_id, note, data=urlencode_postdata(form_data), headers={
|
post_url, video_id, note, data=urlencode_postdata(form_data), headers={
|
||||||
|
**self._get_mso_headers(mso_info),
|
||||||
'Content-Type': 'application/x-www-form-urlencoded',
|
'Content-Type': 'application/x-www-form-urlencoded',
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -1432,40 +1464,72 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
}
|
}
|
||||||
|
|
||||||
guid = xml_text(resource, 'guid') if '<' in resource else resource
|
guid = xml_text(resource, 'guid') if '<' in resource else resource
|
||||||
count = 0
|
for _ in range(2):
|
||||||
while count < 2:
|
|
||||||
requestor_info = self.cache.load(self._MVPD_CACHE, requestor_id) or {}
|
requestor_info = self.cache.load(self._MVPD_CACHE, requestor_id) or {}
|
||||||
authn_token = requestor_info.get('authn_token')
|
authn_token = requestor_info.get('authn_token')
|
||||||
if authn_token and is_expired(authn_token, 'simpleTokenExpires'):
|
if authn_token and is_expired(authn_token, 'simpleTokenExpires'):
|
||||||
authn_token = None
|
authn_token = None
|
||||||
if not authn_token:
|
if not authn_token:
|
||||||
mso_id = self.get_param('ap_mso')
|
if not mso_id:
|
||||||
if mso_id:
|
raise_mvpd_required()
|
||||||
username, password = self._get_login_info('ap_username', 'ap_password', mso_id)
|
username, password = self._get_login_info('ap_username', 'ap_password', mso_id)
|
||||||
if not username or not password:
|
if not username or not password:
|
||||||
raise_mvpd_required()
|
|
||||||
mso_info = MSO_INFO[mso_id]
|
|
||||||
|
|
||||||
provider_redirect_page_res = self._download_webpage_handle(
|
|
||||||
self._SERVICE_PROVIDER_TEMPLATE % 'authenticate/saml', video_id,
|
|
||||||
'Downloading Provider Redirect Page', query={
|
|
||||||
'noflash': 'true',
|
|
||||||
'mso_id': mso_id,
|
|
||||||
'requestor_id': requestor_id,
|
|
||||||
'no_iframe': 'false',
|
|
||||||
'domain_name': 'adobe.com',
|
|
||||||
'redirect_url': url,
|
|
||||||
}, headers={
|
|
||||||
# yt-dlp's default user-agent is usually too old for Comcast_SSO
|
|
||||||
# See: https://github.com/yt-dlp/yt-dlp/issues/10848
|
|
||||||
'User-Agent': self._MODERN_USER_AGENT,
|
|
||||||
} if mso_id == 'Comcast_SSO' else None)
|
|
||||||
elif not self._cookies_passed:
|
|
||||||
raise_mvpd_required()
|
raise_mvpd_required()
|
||||||
|
|
||||||
if not mso_id:
|
device_info, urlh = self._download_json_handle(
|
||||||
pass
|
'https://sp.auth.adobe.com/indiv/devices',
|
||||||
elif mso_id == 'Comcast_SSO':
|
video_id, 'Registering device with Adobe',
|
||||||
|
data=json.dumps({'fingerprint': uuid.uuid4().hex}).encode(),
|
||||||
|
headers={'Content-Type': 'application/json; charset=UTF-8'})
|
||||||
|
|
||||||
|
device_id = device_info['deviceId']
|
||||||
|
mvpd_headers['pass_sfp'] = urlh.get_header('pass_sfp')
|
||||||
|
mvpd_headers['Ap_21'] = device_id
|
||||||
|
|
||||||
|
registration = self._download_json(
|
||||||
|
'https://sp.auth.adobe.com/o/client/register',
|
||||||
|
video_id, 'Registering client with Adobe',
|
||||||
|
data=json.dumps({'software_statement': software_statement}).encode(),
|
||||||
|
headers={'Content-Type': 'application/json; charset=UTF-8'})
|
||||||
|
|
||||||
|
access_token = self._download_json(
|
||||||
|
'https://sp.auth.adobe.com/o/client/token', video_id,
|
||||||
|
'Obtaining access token', data=urlencode_postdata({
|
||||||
|
'grant_type': 'client_credentials',
|
||||||
|
'client_id': registration['client_id'],
|
||||||
|
'client_secret': registration['client_secret'],
|
||||||
|
}),
|
||||||
|
headers={
|
||||||
|
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
|
||||||
|
})['access_token']
|
||||||
|
mvpd_headers['Authorization'] = f'Bearer {access_token}'
|
||||||
|
|
||||||
|
reg_code = self._download_json(
|
||||||
|
f'https://sp.auth.adobe.com/reggie/v1/{requestor_id}/regcode',
|
||||||
|
video_id, 'Obtaining registration code',
|
||||||
|
data=urlencode_postdata({
|
||||||
|
'requestor': requestor_id,
|
||||||
|
'deviceId': device_id,
|
||||||
|
'format': 'json',
|
||||||
|
}),
|
||||||
|
headers={
|
||||||
|
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
|
||||||
|
'Authorization': f'Bearer {access_token}',
|
||||||
|
})['code']
|
||||||
|
|
||||||
|
provider_redirect_page_res = self._download_webpage_handle(
|
||||||
|
self._SERVICE_PROVIDER_TEMPLATE % 'authenticate/saml', video_id,
|
||||||
|
'Downloading Provider Redirect Page', query={
|
||||||
|
'noflash': 'true',
|
||||||
|
'mso_id': mso_id,
|
||||||
|
'requestor_id': requestor_id,
|
||||||
|
'no_iframe': 'false',
|
||||||
|
'domain_name': 'adobe.com',
|
||||||
|
'redirect_url': url,
|
||||||
|
'reg_code': reg_code,
|
||||||
|
}, headers=self._get_mso_headers(mso_info))
|
||||||
|
|
||||||
|
if mso_id == 'Comcast_SSO':
|
||||||
# Comcast page flow varies by video site and whether you
|
# Comcast page flow varies by video site and whether you
|
||||||
# are on Comcast's network.
|
# are on Comcast's network.
|
||||||
provider_redirect_page, urlh = provider_redirect_page_res
|
provider_redirect_page, urlh = provider_redirect_page_res
|
||||||
@@ -1489,8 +1553,8 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
oauth_redirect_url = extract_redirect_url(
|
oauth_redirect_url = extract_redirect_url(
|
||||||
provider_redirect_page, fatal=True)
|
provider_redirect_page, fatal=True)
|
||||||
provider_login_page_res = self._download_webpage_handle(
|
provider_login_page_res = self._download_webpage_handle(
|
||||||
oauth_redirect_url, video_id,
|
oauth_redirect_url, video_id, self._DOWNLOADING_LOGIN_PAGE,
|
||||||
self._DOWNLOADING_LOGIN_PAGE)
|
headers=self._get_mso_headers(mso_info))
|
||||||
else:
|
else:
|
||||||
provider_login_page_res = post_form(
|
provider_login_page_res = post_form(
|
||||||
provider_redirect_page_res,
|
provider_redirect_page_res,
|
||||||
@@ -1500,24 +1564,35 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
provider_login_page_res, 'Logging in', {
|
provider_login_page_res, 'Logging in', {
|
||||||
mso_info['username_field']: username,
|
mso_info['username_field']: username,
|
||||||
mso_info['password_field']: password,
|
mso_info['password_field']: password,
|
||||||
})
|
}, validate_url=True)
|
||||||
mvpd_confirm_page, urlh = mvpd_confirm_page_res
|
mvpd_confirm_page, urlh = mvpd_confirm_page_res
|
||||||
if '<button class="submit" value="Resume">Resume</button>' in mvpd_confirm_page:
|
if '<button class="submit" value="Resume">Resume</button>' in mvpd_confirm_page:
|
||||||
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
||||||
elif mso_id == 'Philo':
|
elif mso_id == 'Philo':
|
||||||
# Philo has very unique authentication method
|
# Philo has very unique authentication method
|
||||||
self._download_webpage(
|
self._request_webpage(
|
||||||
'https://idp.philo.com/auth/init/login_code', video_id, 'Requesting auth code', data=urlencode_postdata({
|
'https://idp.philo.com/auth/init/login_code', video_id,
|
||||||
|
'Requesting Philo auth code', data=json.dumps({
|
||||||
'ident': username,
|
'ident': username,
|
||||||
'device': 'web',
|
'device': 'web',
|
||||||
'send_confirm_link': False,
|
'send_confirm_link': False,
|
||||||
'send_token': True,
|
'send_token': True,
|
||||||
}))
|
'device_ident': f'web-{uuid.uuid4().hex}',
|
||||||
|
'include_login_link': True,
|
||||||
|
}).encode(), headers={
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Accept': 'application/json',
|
||||||
|
})
|
||||||
|
|
||||||
philo_code = getpass.getpass('Type auth code you have received [Return]: ')
|
philo_code = getpass.getpass('Type auth code you have received [Return]: ')
|
||||||
self._download_webpage(
|
self._request_webpage(
|
||||||
'https://idp.philo.com/auth/update/login_code', video_id, 'Submitting token', data=urlencode_postdata({
|
'https://idp.philo.com/auth/update/login_code', video_id,
|
||||||
'token': philo_code,
|
'Submitting token', data=json.dumps({'token': philo_code}).encode(),
|
||||||
}))
|
headers={
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Accept': 'application/json',
|
||||||
|
})
|
||||||
|
|
||||||
mvpd_confirm_page_res = self._download_webpage_handle('https://idp.philo.com/idp/submit', video_id, 'Confirming Philo Login')
|
mvpd_confirm_page_res = self._download_webpage_handle('https://idp.philo.com/idp/submit', video_id, 'Confirming Philo Login')
|
||||||
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
||||||
elif mso_id == 'Verizon':
|
elif mso_id == 'Verizon':
|
||||||
@@ -1539,7 +1614,7 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
provider_redirect_page_res, 'Logging in', {
|
provider_redirect_page_res, 'Logging in', {
|
||||||
mso_info['username_field']: username,
|
mso_info['username_field']: username,
|
||||||
mso_info['password_field']: password,
|
mso_info['password_field']: password,
|
||||||
})
|
}, validate_url=True)
|
||||||
saml_login_page, urlh = saml_login_page_res
|
saml_login_page, urlh = saml_login_page_res
|
||||||
if 'Please try again.' in saml_login_page:
|
if 'Please try again.' in saml_login_page:
|
||||||
raise ExtractorError(
|
raise ExtractorError(
|
||||||
@@ -1560,7 +1635,7 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
[saml_login_page, saml_redirect_url], 'Logging in', {
|
[saml_login_page, saml_redirect_url], 'Logging in', {
|
||||||
mso_info['username_field']: username,
|
mso_info['username_field']: username,
|
||||||
mso_info['password_field']: password,
|
mso_info['password_field']: password,
|
||||||
})
|
}, validate_url=True)
|
||||||
if 'Please try again.' in saml_login_page:
|
if 'Please try again.' in saml_login_page:
|
||||||
raise ExtractorError(
|
raise ExtractorError(
|
||||||
'Failed to login, incorrect User ID or Password.')
|
'Failed to login, incorrect User ID or Password.')
|
||||||
@@ -1631,7 +1706,7 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
provider_login_page_res, 'Logging in', {
|
provider_login_page_res, 'Logging in', {
|
||||||
mso_info['username_field']: username,
|
mso_info['username_field']: username,
|
||||||
mso_info['password_field']: password,
|
mso_info['password_field']: password,
|
||||||
})
|
}, validate_url=True)
|
||||||
|
|
||||||
provider_refresh_redirect_url = extract_redirect_url(
|
provider_refresh_redirect_url = extract_redirect_url(
|
||||||
provider_association_redirect, url=urlh.url)
|
provider_association_redirect, url=urlh.url)
|
||||||
@@ -1682,7 +1757,7 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
provider_login_page_res, 'Logging in', {
|
provider_login_page_res, 'Logging in', {
|
||||||
mso_info['username_field']: username,
|
mso_info['username_field']: username,
|
||||||
mso_info['password_field']: password,
|
mso_info['password_field']: password,
|
||||||
})
|
}, validate_url=True)
|
||||||
|
|
||||||
provider_refresh_redirect_url = extract_redirect_url(
|
provider_refresh_redirect_url = extract_redirect_url(
|
||||||
provider_association_redirect, url=urlh.url)
|
provider_association_redirect, url=urlh.url)
|
||||||
@@ -1699,6 +1774,27 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
query=hidden_data)
|
query=hidden_data)
|
||||||
|
|
||||||
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
||||||
|
elif mso_id == 'Fubo':
|
||||||
|
_, urlh = provider_redirect_page_res
|
||||||
|
|
||||||
|
fubo_response = self._download_json(
|
||||||
|
'https://api.fubo.tv/partners/tve/connect', video_id,
|
||||||
|
'Authenticating with Fubo', 'Unable to authenticate with Fubo',
|
||||||
|
query=parse_qs(urlh.url), data=json.dumps({
|
||||||
|
'username': username,
|
||||||
|
'password': password,
|
||||||
|
}).encode(), headers={
|
||||||
|
'Accept': 'application/json',
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
})
|
||||||
|
|
||||||
|
self._request_webpage(
|
||||||
|
'https://sp.auth.adobe.com/adobe-services/oauth2', video_id,
|
||||||
|
'Authenticating with Adobe', 'Failed to authenticate with Adobe',
|
||||||
|
query={
|
||||||
|
'code': fubo_response['code'],
|
||||||
|
'state': fubo_response['state'],
|
||||||
|
})
|
||||||
else:
|
else:
|
||||||
# Some providers (e.g. DIRECTV NOW) have another meta refresh
|
# Some providers (e.g. DIRECTV NOW) have another meta refresh
|
||||||
# based redirect that should be followed.
|
# based redirect that should be followed.
|
||||||
@@ -1717,7 +1813,8 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
}
|
}
|
||||||
if mso_id in ('Cablevision', 'AlticeOne'):
|
if mso_id in ('Cablevision', 'AlticeOne'):
|
||||||
form_data['_eventId_proceed'] = ''
|
form_data['_eventId_proceed'] = ''
|
||||||
mvpd_confirm_page_res = post_form(provider_login_page_res, 'Logging in', form_data)
|
mvpd_confirm_page_res = post_form(
|
||||||
|
provider_login_page_res, 'Logging in', form_data, validate_url=True)
|
||||||
if mso_id != 'Rogers':
|
if mso_id != 'Rogers':
|
||||||
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
post_form(mvpd_confirm_page_res, 'Confirming Login')
|
||||||
|
|
||||||
@@ -1727,6 +1824,7 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
'Retrieving Session', data=urlencode_postdata({
|
'Retrieving Session', data=urlencode_postdata({
|
||||||
'_method': 'GET',
|
'_method': 'GET',
|
||||||
'requestor_id': requestor_id,
|
'requestor_id': requestor_id,
|
||||||
|
'reg_code': reg_code,
|
||||||
}), headers=mvpd_headers)
|
}), headers=mvpd_headers)
|
||||||
except ExtractorError as e:
|
except ExtractorError as e:
|
||||||
if not mso_id and isinstance(e.cause, HTTPError) and e.cause.status == 401:
|
if not mso_id and isinstance(e.cause, HTTPError) and e.cause.status == 401:
|
||||||
@@ -1734,7 +1832,6 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
raise
|
raise
|
||||||
if '<pendingLogout' in session:
|
if '<pendingLogout' in session:
|
||||||
self.cache.store(self._MVPD_CACHE, requestor_id, {})
|
self.cache.store(self._MVPD_CACHE, requestor_id, {})
|
||||||
count += 1
|
|
||||||
continue
|
continue
|
||||||
authn_token = unescapeHTML(xml_text(session, 'authnToken'))
|
authn_token = unescapeHTML(xml_text(session, 'authnToken'))
|
||||||
requestor_info['authn_token'] = authn_token
|
requestor_info['authn_token'] = authn_token
|
||||||
@@ -1755,7 +1852,6 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
}), headers=mvpd_headers)
|
}), headers=mvpd_headers)
|
||||||
if '<pendingLogout' in authorize:
|
if '<pendingLogout' in authorize:
|
||||||
self.cache.store(self._MVPD_CACHE, requestor_id, {})
|
self.cache.store(self._MVPD_CACHE, requestor_id, {})
|
||||||
count += 1
|
|
||||||
continue
|
continue
|
||||||
if '<error' in authorize:
|
if '<error' in authorize:
|
||||||
raise ExtractorError(xml_text(authorize, 'details'), expected=True)
|
raise ExtractorError(xml_text(authorize, 'details'), expected=True)
|
||||||
@@ -1778,6 +1874,5 @@ def extract_redirect_url(html, url=None, fatal=False):
|
|||||||
}), headers=mvpd_headers)
|
}), headers=mvpd_headers)
|
||||||
if '<pendingLogout' in short_authorize:
|
if '<pendingLogout' in short_authorize:
|
||||||
self.cache.store(self._MVPD_CACHE, requestor_id, {})
|
self.cache.store(self._MVPD_CACHE, requestor_id, {})
|
||||||
count += 1
|
|
||||||
continue
|
continue
|
||||||
return short_authorize
|
return short_authorize
|
||||||
|
|||||||
@@ -1,285 +1,100 @@
|
|||||||
import functools
|
|
||||||
import re
|
|
||||||
|
|
||||||
from .common import InfoExtractor
|
from .common import InfoExtractor
|
||||||
from ..utils import (
|
from ..utils import (
|
||||||
ISO639Utils,
|
ISO639Utils,
|
||||||
OnDemandPagedList,
|
clean_html,
|
||||||
|
determine_ext,
|
||||||
float_or_none,
|
float_or_none,
|
||||||
int_or_none,
|
int_or_none,
|
||||||
join_nonempty,
|
join_nonempty,
|
||||||
parse_duration,
|
url_or_none,
|
||||||
str_or_none,
|
|
||||||
str_to_int,
|
|
||||||
unified_strdate,
|
|
||||||
)
|
)
|
||||||
|
from ..utils.traversal import traverse_obj
|
||||||
|
|
||||||
|
|
||||||
class AdobeTVBaseIE(InfoExtractor):
|
class AdobeTVVideoIE(InfoExtractor):
|
||||||
def _call_api(self, path, video_id, query, note=None):
|
|
||||||
return self._download_json(
|
|
||||||
'http://tv.adobe.com/api/v4/' + path,
|
|
||||||
video_id, note, query=query)['data']
|
|
||||||
|
|
||||||
def _parse_subtitles(self, video_data, url_key):
|
|
||||||
subtitles = {}
|
|
||||||
for translation in video_data.get('translations', []):
|
|
||||||
vtt_path = translation.get(url_key)
|
|
||||||
if not vtt_path:
|
|
||||||
continue
|
|
||||||
lang = translation.get('language_w3c') or ISO639Utils.long2short(translation['language_medium'])
|
|
||||||
subtitles.setdefault(lang, []).append({
|
|
||||||
'ext': 'vtt',
|
|
||||||
'url': vtt_path,
|
|
||||||
})
|
|
||||||
return subtitles
|
|
||||||
|
|
||||||
def _parse_video_data(self, video_data):
|
|
||||||
video_id = str(video_data['id'])
|
|
||||||
title = video_data['title']
|
|
||||||
|
|
||||||
s3_extracted = False
|
|
||||||
formats = []
|
|
||||||
for source in video_data.get('videos', []):
|
|
||||||
source_url = source.get('url')
|
|
||||||
if not source_url:
|
|
||||||
continue
|
|
||||||
f = {
|
|
||||||
'format_id': source.get('quality_level'),
|
|
||||||
'fps': int_or_none(source.get('frame_rate')),
|
|
||||||
'height': int_or_none(source.get('height')),
|
|
||||||
'tbr': int_or_none(source.get('video_data_rate')),
|
|
||||||
'width': int_or_none(source.get('width')),
|
|
||||||
'url': source_url,
|
|
||||||
}
|
|
||||||
original_filename = source.get('original_filename')
|
|
||||||
if original_filename:
|
|
||||||
if not (f.get('height') and f.get('width')):
|
|
||||||
mobj = re.search(r'_(\d+)x(\d+)', original_filename)
|
|
||||||
if mobj:
|
|
||||||
f.update({
|
|
||||||
'height': int(mobj.group(2)),
|
|
||||||
'width': int(mobj.group(1)),
|
|
||||||
})
|
|
||||||
if original_filename.startswith('s3://') and not s3_extracted:
|
|
||||||
formats.append({
|
|
||||||
'format_id': 'original',
|
|
||||||
'quality': 1,
|
|
||||||
'url': original_filename.replace('s3://', 'https://s3.amazonaws.com/'),
|
|
||||||
})
|
|
||||||
s3_extracted = True
|
|
||||||
formats.append(f)
|
|
||||||
|
|
||||||
return {
|
|
||||||
'id': video_id,
|
|
||||||
'title': title,
|
|
||||||
'description': video_data.get('description'),
|
|
||||||
'thumbnail': video_data.get('thumbnail'),
|
|
||||||
'upload_date': unified_strdate(video_data.get('start_date')),
|
|
||||||
'duration': parse_duration(video_data.get('duration')),
|
|
||||||
'view_count': str_to_int(video_data.get('playcount')),
|
|
||||||
'formats': formats,
|
|
||||||
'subtitles': self._parse_subtitles(video_data, 'vtt'),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class AdobeTVEmbedIE(AdobeTVBaseIE):
|
|
||||||
IE_NAME = 'adobetv:embed'
|
|
||||||
_VALID_URL = r'https?://tv\.adobe\.com/embed/\d+/(?P<id>\d+)'
|
|
||||||
_TEST = {
|
|
||||||
'url': 'https://tv.adobe.com/embed/22/4153',
|
|
||||||
'md5': 'c8c0461bf04d54574fc2b4d07ac6783a',
|
|
||||||
'info_dict': {
|
|
||||||
'id': '4153',
|
|
||||||
'ext': 'flv',
|
|
||||||
'title': 'Creating Graphics Optimized for BlackBerry',
|
|
||||||
'description': 'md5:eac6e8dced38bdaae51cd94447927459',
|
|
||||||
'thumbnail': r're:https?://.*\.jpg$',
|
|
||||||
'upload_date': '20091109',
|
|
||||||
'duration': 377,
|
|
||||||
'view_count': int,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
def _real_extract(self, url):
|
|
||||||
video_id = self._match_id(url)
|
|
||||||
|
|
||||||
video_data = self._call_api(
|
|
||||||
'episode/' + video_id, video_id, {'disclosure': 'standard'})[0]
|
|
||||||
return self._parse_video_data(video_data)
|
|
||||||
|
|
||||||
|
|
||||||
class AdobeTVIE(AdobeTVBaseIE):
|
|
||||||
IE_NAME = 'adobetv'
|
IE_NAME = 'adobetv'
|
||||||
_VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?watch/(?P<show_urlname>[^/]+)/(?P<id>[^/]+)'
|
|
||||||
|
|
||||||
_TEST = {
|
|
||||||
'url': 'http://tv.adobe.com/watch/the-complete-picture-with-julieanne-kost/quick-tip-how-to-draw-a-circle-around-an-object-in-photoshop/',
|
|
||||||
'md5': '9bc5727bcdd55251f35ad311ca74fa1e',
|
|
||||||
'info_dict': {
|
|
||||||
'id': '10981',
|
|
||||||
'ext': 'mp4',
|
|
||||||
'title': 'Quick Tip - How to Draw a Circle Around an Object in Photoshop',
|
|
||||||
'description': 'md5:99ec318dc909d7ba2a1f2b038f7d2311',
|
|
||||||
'thumbnail': r're:https?://.*\.jpg$',
|
|
||||||
'upload_date': '20110914',
|
|
||||||
'duration': 60,
|
|
||||||
'view_count': int,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
def _real_extract(self, url):
|
|
||||||
language, show_urlname, urlname = self._match_valid_url(url).groups()
|
|
||||||
if not language:
|
|
||||||
language = 'en'
|
|
||||||
|
|
||||||
video_data = self._call_api(
|
|
||||||
'episode/get', urlname, {
|
|
||||||
'disclosure': 'standard',
|
|
||||||
'language': language,
|
|
||||||
'show_urlname': show_urlname,
|
|
||||||
'urlname': urlname,
|
|
||||||
})[0]
|
|
||||||
return self._parse_video_data(video_data)
|
|
||||||
|
|
||||||
|
|
||||||
class AdobeTVPlaylistBaseIE(AdobeTVBaseIE):
|
|
||||||
_PAGE_SIZE = 25
|
|
||||||
|
|
||||||
def _fetch_page(self, display_id, query, page):
|
|
||||||
page += 1
|
|
||||||
query['page'] = page
|
|
||||||
for element_data in self._call_api(
|
|
||||||
self._RESOURCE, display_id, query, f'Download Page {page}'):
|
|
||||||
yield self._process_data(element_data)
|
|
||||||
|
|
||||||
def _extract_playlist_entries(self, display_id, query):
|
|
||||||
return OnDemandPagedList(functools.partial(
|
|
||||||
self._fetch_page, display_id, query), self._PAGE_SIZE)
|
|
||||||
|
|
||||||
|
|
||||||
class AdobeTVShowIE(AdobeTVPlaylistBaseIE):
|
|
||||||
IE_NAME = 'adobetv:show'
|
|
||||||
_VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?show/(?P<id>[^/]+)'
|
|
||||||
|
|
||||||
_TEST = {
|
|
||||||
'url': 'http://tv.adobe.com/show/the-complete-picture-with-julieanne-kost',
|
|
||||||
'info_dict': {
|
|
||||||
'id': '36',
|
|
||||||
'title': 'The Complete Picture with Julieanne Kost',
|
|
||||||
'description': 'md5:fa50867102dcd1aa0ddf2ab039311b27',
|
|
||||||
},
|
|
||||||
'playlist_mincount': 136,
|
|
||||||
}
|
|
||||||
_RESOURCE = 'episode'
|
|
||||||
_process_data = AdobeTVBaseIE._parse_video_data
|
|
||||||
|
|
||||||
def _real_extract(self, url):
|
|
||||||
language, show_urlname = self._match_valid_url(url).groups()
|
|
||||||
if not language:
|
|
||||||
language = 'en'
|
|
||||||
query = {
|
|
||||||
'disclosure': 'standard',
|
|
||||||
'language': language,
|
|
||||||
'show_urlname': show_urlname,
|
|
||||||
}
|
|
||||||
|
|
||||||
show_data = self._call_api(
|
|
||||||
'show/get', show_urlname, query)[0]
|
|
||||||
|
|
||||||
return self.playlist_result(
|
|
||||||
self._extract_playlist_entries(show_urlname, query),
|
|
||||||
str_or_none(show_data.get('id')),
|
|
||||||
show_data.get('show_name'),
|
|
||||||
show_data.get('show_description'))
|
|
||||||
|
|
||||||
|
|
||||||
class AdobeTVChannelIE(AdobeTVPlaylistBaseIE):
|
|
||||||
IE_NAME = 'adobetv:channel'
|
|
||||||
_VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?channel/(?P<id>[^/]+)(?:/(?P<category_urlname>[^/]+))?'
|
|
||||||
|
|
||||||
_TEST = {
|
|
||||||
'url': 'http://tv.adobe.com/channel/development',
|
|
||||||
'info_dict': {
|
|
||||||
'id': 'development',
|
|
||||||
},
|
|
||||||
'playlist_mincount': 96,
|
|
||||||
}
|
|
||||||
_RESOURCE = 'show'
|
|
||||||
|
|
||||||
def _process_data(self, show_data):
|
|
||||||
return self.url_result(
|
|
||||||
show_data['url'], 'AdobeTVShow', str_or_none(show_data.get('id')))
|
|
||||||
|
|
||||||
def _real_extract(self, url):
|
|
||||||
language, channel_urlname, category_urlname = self._match_valid_url(url).groups()
|
|
||||||
if not language:
|
|
||||||
language = 'en'
|
|
||||||
query = {
|
|
||||||
'channel_urlname': channel_urlname,
|
|
||||||
'language': language,
|
|
||||||
}
|
|
||||||
if category_urlname:
|
|
||||||
query['category_urlname'] = category_urlname
|
|
||||||
|
|
||||||
return self.playlist_result(
|
|
||||||
self._extract_playlist_entries(channel_urlname, query),
|
|
||||||
channel_urlname)
|
|
||||||
|
|
||||||
|
|
||||||
class AdobeTVVideoIE(AdobeTVBaseIE):
|
|
||||||
IE_NAME = 'adobetv:video'
|
|
||||||
_VALID_URL = r'https?://video\.tv\.adobe\.com/v/(?P<id>\d+)'
|
_VALID_URL = r'https?://video\.tv\.adobe\.com/v/(?P<id>\d+)'
|
||||||
_EMBED_REGEX = [r'<iframe[^>]+src=[\'"](?P<url>(?:https?:)?//video\.tv\.adobe\.com/v/\d+[^"]+)[\'"]']
|
_EMBED_REGEX = [r'<iframe[^>]+src=["\'](?P<url>(?:https?:)?//video\.tv\.adobe\.com/v/\d+)']
|
||||||
|
_TESTS = [{
|
||||||
_TEST = {
|
'url': 'https://video.tv.adobe.com/v/2456',
|
||||||
# From https://helpx.adobe.com/acrobat/how-to/new-experience-acrobat-dc.html?set=acrobat--get-started--essential-beginners
|
|
||||||
'url': 'https://video.tv.adobe.com/v/2456/',
|
|
||||||
'md5': '43662b577c018ad707a63766462b1e87',
|
'md5': '43662b577c018ad707a63766462b1e87',
|
||||||
'info_dict': {
|
'info_dict': {
|
||||||
'id': '2456',
|
'id': '2456',
|
||||||
'ext': 'mp4',
|
'ext': 'mp4',
|
||||||
'title': 'New experience with Acrobat DC',
|
'title': 'New experience with Acrobat DC',
|
||||||
'description': 'New experience with Acrobat DC',
|
'description': 'New experience with Acrobat DC',
|
||||||
'duration': 248.667,
|
'duration': 248.522,
|
||||||
|
'thumbnail': r're:https?://images-tv\.adobe\.com/.+\.jpg',
|
||||||
},
|
},
|
||||||
}
|
}, {
|
||||||
|
'url': 'https://video.tv.adobe.com/v/3463980/adobe-acrobat',
|
||||||
|
'info_dict': {
|
||||||
|
'id': '3463980',
|
||||||
|
'ext': 'mp4',
|
||||||
|
'title': 'Adobe Acrobat: How to Customize the Toolbar for Faster PDF Editing',
|
||||||
|
'description': 'md5:94368ab95ae24f9c1bee0cb346e03dc3',
|
||||||
|
'duration': 97.514,
|
||||||
|
'thumbnail': r're:https?://images-tv\.adobe\.com/.+\.jpg',
|
||||||
|
},
|
||||||
|
}]
|
||||||
|
_WEBPAGE_TESTS = [{
|
||||||
|
# https://video.tv.adobe.com/v/3442499
|
||||||
|
'url': 'https://business.adobe.com/dx-fragments/summit/2025/marquees/S335/ondemand.live.html',
|
||||||
|
'info_dict': {
|
||||||
|
'id': '3442499',
|
||||||
|
'ext': 'mp4',
|
||||||
|
'title': 'S335 - Beyond Personalization: Creating Intent-Based Experiences at Scale',
|
||||||
|
'description': 'Beyond Personalization: Creating Intent-Based Experiences at Scale',
|
||||||
|
'duration': 2906.8,
|
||||||
|
'thumbnail': r're:https?://images-tv\.adobe\.com/.+\.jpg',
|
||||||
|
},
|
||||||
|
}]
|
||||||
|
|
||||||
def _real_extract(self, url):
|
def _real_extract(self, url):
|
||||||
video_id = self._match_id(url)
|
video_id = self._match_id(url)
|
||||||
webpage = self._download_webpage(url, video_id)
|
webpage = self._download_webpage(url, video_id)
|
||||||
|
video_data = self._search_json(
|
||||||
video_data = self._parse_json(self._search_regex(
|
r'var\s+bridge\s*=', webpage, 'bridged data', video_id)
|
||||||
r'var\s+bridge\s*=\s*([^;]+);', webpage, 'bridged data'), video_id)
|
|
||||||
title = video_data['title']
|
|
||||||
|
|
||||||
formats = []
|
formats = []
|
||||||
sources = video_data.get('sources') or []
|
for source in traverse_obj(video_data, (
|
||||||
for source in sources:
|
'sources', lambda _, v: v['format'] != 'playlist' and url_or_none(v['src']),
|
||||||
source_src = source.get('src')
|
)):
|
||||||
if not source_src:
|
source_url = self._proto_relative_url(source['src'])
|
||||||
continue
|
if determine_ext(source_url) == 'm3u8':
|
||||||
formats.append({
|
fmts = self._extract_m3u8_formats(
|
||||||
'filesize': int_or_none(source.get('kilobytes') or None, invscale=1000),
|
source_url, video_id, 'mp4', m3u8_id='hls', fatal=False)
|
||||||
'format_id': join_nonempty(source.get('format'), source.get('label')),
|
else:
|
||||||
'height': int_or_none(source.get('height') or None),
|
fmts = [{'url': source_url}]
|
||||||
'tbr': int_or_none(source.get('bitrate') or None),
|
|
||||||
'width': int_or_none(source.get('width') or None),
|
|
||||||
'url': source_src,
|
|
||||||
})
|
|
||||||
|
|
||||||
# For both metadata and downloaded files the duration varies among
|
for fmt in fmts:
|
||||||
# formats. I just pick the max one
|
fmt.update(traverse_obj(source, {
|
||||||
duration = max(filter(None, [
|
'duration': ('duration', {float_or_none(scale=1000)}),
|
||||||
float_or_none(source.get('duration'), scale=1000)
|
'filesize': ('kilobytes', {float_or_none(invscale=1000)}),
|
||||||
for source in sources]))
|
'format_id': (('format', 'label'), {str}, all, {lambda x: join_nonempty(*x)}),
|
||||||
|
'height': ('height', {int_or_none}),
|
||||||
|
'tbr': ('bitrate', {int_or_none}),
|
||||||
|
'width': ('width', {int_or_none}),
|
||||||
|
}))
|
||||||
|
formats.extend(fmts)
|
||||||
|
|
||||||
|
subtitles = {}
|
||||||
|
for translation in traverse_obj(video_data, (
|
||||||
|
'translations', lambda _, v: url_or_none(v['vttPath']),
|
||||||
|
)):
|
||||||
|
lang = translation.get('language_w3c') or ISO639Utils.long2short(translation.get('language_medium')) or 'und'
|
||||||
|
subtitles.setdefault(lang, []).append({
|
||||||
|
'ext': 'vtt',
|
||||||
|
'url': self._proto_relative_url(translation['vttPath']),
|
||||||
|
})
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'id': video_id,
|
'id': video_id,
|
||||||
'formats': formats,
|
'formats': formats,
|
||||||
'title': title,
|
'subtitles': subtitles,
|
||||||
'description': video_data.get('description'),
|
**traverse_obj(video_data, {
|
||||||
'thumbnail': video_data.get('video', {}).get('poster'),
|
'title': ('title', {clean_html}),
|
||||||
'duration': duration,
|
'description': ('description', {clean_html}, filter),
|
||||||
'subtitles': self._parse_subtitles(video_data, 'vttPath'),
|
'thumbnail': ('video', 'poster', {self._proto_relative_url}, {url_or_none}),
|
||||||
|
}),
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -84,6 +84,8 @@ class AdultSwimIE(TurnerBaseIE):
|
|||||||
'skip': '404 Not Found',
|
'skip': '404 Not Found',
|
||||||
}]
|
}]
|
||||||
|
|
||||||
|
_SOFTWARE_STATEMENT = 'eyJhbGciOiJSUzI1NiJ9.eyJzdWIiOiIwNjg5ZmU2My00OTc5LTQxZmQtYWYxNC1hYjVlNmJjNWVkZWIiLCJuYmYiOjE1MzcxOTA2NzQsImlzcyI6ImF1dGguYWRvYmUuY29tIiwiaWF0IjoxNTM3MTkwNjc0fQ.Xl3AEduM0s1TxDQ6-XssdKIiLm261hhsEv1C1yo_nitIajZThSI9rXILqtIzO0aujoHhdzUnu_dUCq9ffiSBzEG632tTa1la-5tegHtce80cMhewBN4n2t8n9O5tiaPx8MPY8ALdm5wS7QzWE6DO_LTJKgE8Bl7Yv-CWJT4q4SywtNiQWLVOuhBRnDyfsRezxRwptw8qTn9dv5ZzUrVJaby5fDZ_nOncMKvegOgaKd5KEuCAGQ-mg-PSuValMjGuf6FwDguGaK7IyI5Y2oOrzXmD4Dj7q4WBg8w9QoZhtLeAU56mcsGILolku2R5FHlVLO9xhjResyt-pfmegOkpSw'
|
||||||
|
|
||||||
def _real_extract(self, url):
|
def _real_extract(self, url):
|
||||||
show_path, episode_path = self._match_valid_url(url).groups()
|
show_path, episode_path = self._match_valid_url(url).groups()
|
||||||
display_id = episode_path or show_path
|
display_id = episode_path or show_path
|
||||||
@@ -152,7 +154,7 @@ def _real_extract(self, url):
|
|||||||
# CDN_TOKEN_APP_ID from:
|
# CDN_TOKEN_APP_ID from:
|
||||||
# https://d2gg02c3xr550i.cloudfront.net/assets/asvp.e9c8bef24322d060ef87.bundle.js
|
# https://d2gg02c3xr550i.cloudfront.net/assets/asvp.e9c8bef24322d060ef87.bundle.js
|
||||||
'appId': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhcHBJZCI6ImFzLXR2ZS1kZXNrdG9wLXB0enQ2bSIsInByb2R1Y3QiOiJ0dmUiLCJuZXR3b3JrIjoiYXMiLCJwbGF0Zm9ybSI6ImRlc2t0b3AiLCJpYXQiOjE1MzI3MDIyNzl9.BzSCk-WYOZ2GMCIaeVb8zWnzhlgnXuJTCu0jGp_VaZE',
|
'appId': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhcHBJZCI6ImFzLXR2ZS1kZXNrdG9wLXB0enQ2bSIsInByb2R1Y3QiOiJ0dmUiLCJuZXR3b3JrIjoiYXMiLCJwbGF0Zm9ybSI6ImRlc2t0b3AiLCJpYXQiOjE1MzI3MDIyNzl9.BzSCk-WYOZ2GMCIaeVb8zWnzhlgnXuJTCu0jGp_VaZE',
|
||||||
}, {
|
}, self._SOFTWARE_STATEMENT, {
|
||||||
'url': url,
|
'url': url,
|
||||||
'site_name': 'AdultSwim',
|
'site_name': 'AdultSwim',
|
||||||
'auth_required': auth,
|
'auth_required': auth,
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
import json
|
||||||
|
|
||||||
from .theplatform import ThePlatformIE
|
from .theplatform import ThePlatformIE
|
||||||
from ..utils import (
|
from ..utils import (
|
||||||
ExtractorError,
|
ExtractorError,
|
||||||
@@ -6,7 +8,6 @@
|
|||||||
remove_start,
|
remove_start,
|
||||||
traverse_obj,
|
traverse_obj,
|
||||||
update_url_query,
|
update_url_query,
|
||||||
urlencode_postdata,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -20,13 +21,13 @@ class AENetworksBaseIE(ThePlatformIE): # XXX: Do not subclass from concrete IE
|
|||||||
_THEPLATFORM_KEY = '43jXaGRQud'
|
_THEPLATFORM_KEY = '43jXaGRQud'
|
||||||
_THEPLATFORM_SECRET = 'S10BPXHMlb'
|
_THEPLATFORM_SECRET = 'S10BPXHMlb'
|
||||||
_DOMAIN_MAP = {
|
_DOMAIN_MAP = {
|
||||||
'history.com': ('HISTORY', 'history'),
|
'history.com': ('HISTORY', 'history', 'eyJhbGciOiJSUzI1NiJ9.eyJzdWIiOiI1MzZlMTQ3ZS0zMzFhLTQxY2YtYTMwNC01MDA2NzNlOGYwYjYiLCJuYmYiOjE1Mzg2NjMzMDksImlzcyI6ImF1dGguYWRvYmUuY29tIiwiaWF0IjoxNTM4NjYzMzA5fQ.n24-FVHLGXJe2D4atIQZ700aiXKIajKh5PWFoHJ40Az4itjtwwSFHnvufnoal3T8lYkwNLxce7H-IEGxIykRkZEdwq09pMKMT-ft9ASzE4vQ8fAWbf5ZgDME86x4Jq_YaxkRc9Ne0eShGhl8fgTJHvk07sfWcol61HJ7kU7K8FzzcHR0ucFQgA5VNd8RyjoGWY7c6VxnXR214LOpXsywmit04-vGJC102b_WA2EQfqI93UzG6M6l0EeV4n0_ijP3s8_i8WMJZ_uwnTafCIY6G_731i01dKXDLSFzG1vYglAwDa8DTcdrAAuIFFDF6QNGItCCmwbhjufjmoeVb7R1Gg'),
|
||||||
'aetv.com': ('AETV', 'aetv'),
|
'aetv.com': ('AETV', 'aetv', 'eyJhbGciOiJSUzI1NiJ9.eyJzdWIiOiI5Y2IwNjg2Yy03ODUxLTRiZDUtODcyMC00MjNlZTg1YTQ1NzMiLCJuYmYiOjE1Mzg2NjMyOTAsImlzcyI6ImF1dGguYWRvYmUuY29tIiwiaWF0IjoxNTM4NjYzMjkwfQ.T5Elf0X4TndO4NEgqBas1gDxNHGPVk_daO2Ha5FBzVO6xi3zM7eavdAKfYMCN7gpWYJx03iADaVPtczO_t_aGZczDjpwJHgTUzDgvcLZAVsVDqtDIAMy3S846rPgT6UDbVoxurA7B2VTPm9phjrSXhejvd0LBO8MQL4AZ3sy2VmiPJ2noT1ily5PuHCYlkrT1fheO064duR__Cd9DQ5VTMnKjzY3Cx345CEwKDkUk5gwgxhXM-aY0eblehrq8VD81_aRM_O3tvh7nbTydHOnUpV-k_iKVi49gqz7Sf8zb6Zh5z2Uftn3vYCfE5NQuesitoRMnsH17nW7o_D59hkRgg'),
|
||||||
'mylifetime.com': ('LIFETIME', 'lifetime'),
|
'mylifetime.com': ('LIFETIME', 'lifetime', 'eyJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJmODg0MDM1ZC1mZGRmLTRmYjgtYmRkMC05MzRhZDdiYTAwYTciLCJuYmYiOjE1NDkzOTI2NDQsImlzcyI6ImF1dGguYWRvYmUuY29tIiwiaWF0IjoxNTQ5MzkyNjQ0fQ.vkTIaCpheKdKQd__2-3ec4qkcpbAhyCTvwe5iTl922ItSQfVhpEJG4wseVSNmBTrpBi0hvLedcw6Hj1_UuzBMVuVcCqLprU-pI8recEwL0u7G-eVkylsxe1OTUm1o3V6OykXQ9KlA-QQLL1neUhdhR1n5B1LZ4cmtBmiEpfgf4rFwXD1ScFylIcaWKLBqHoRBNUmxyTmoXXvn_A-GGSj9eCizFzY8W5uBwUcsoiw2Cr1skx7PbB2RSP1I5DsoIJKG-8XV1KS7MWl-fNLjE-hVAsI9znqfEEFcPBiv3LhCP4Nf4OIs7xAselMn0M0c8igRUZhURWX_hdygUAxkbKFtQ'),
|
||||||
'lifetimemovieclub.com': ('LIFETIMEMOVIECLUB', 'lmc'),
|
'fyi.tv': ('FYI', 'fyi', 'eyJhbGciOiJSUzI1NiJ9.eyJzdWIiOiIxOGZiOWM3Ny1mYmMzLTQxYTktYmE1Yi1lMzM0ZmUzNzU4NjEiLCJuYmYiOjE1ODc1ODAzNzcsImlzcyI6ImF1dGguYWRvYmUuY29tIiwiaWF0IjoxNTg3NTgwMzc3fQ.AYDuipKswmIfLBfOjHRsfc5fMV5NmJUmiJnkpiep4VEw9QiXkygFj4bN06Si5tFc5Mee5TDrGzDpV6iuKbVpLT5kuqXhAn-Wozf5zKPsg_IpdEKO7gsiCq4calt72ct44KTqtKD_hVcoxQU24_HaJsRgXzu3B-6Ff6UrmsXkyvYifYVC9v2DSkdCuA02_IrlllzVT2kRuefUXgL4vQRtTFf77uYa0RKSTG7uVkiQ_AU41eXevKlO2qgtc14Hk5cZ7-ZNrDyMCXYA5ngdIHP7Gs9PWaFXT36PFHI_rC4EfxUABPzjQFxjpP75aX5qn8SH__HbM9q3hoPWgaEaf76qIQ'),
|
||||||
'fyi.tv': ('FYI', 'fyi'),
|
'lifetimemovieclub.com': ('LIFETIMEMOVIECLUB', 'lmc', None),
|
||||||
'historyvault.com': (None, 'historyvault'),
|
'historyvault.com': (None, 'historyvault', None),
|
||||||
'biography.com': (None, 'biography'),
|
'biography.com': (None, 'biography', None),
|
||||||
}
|
}
|
||||||
|
|
||||||
def _extract_aen_smil(self, smil_url, video_id, auth=None):
|
def _extract_aen_smil(self, smil_url, video_id, auth=None):
|
||||||
@@ -71,7 +72,7 @@ def _extract_aen_smil(self, smil_url, video_id, auth=None):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def _extract_aetn_info(self, domain, filter_key, filter_value, url):
|
def _extract_aetn_info(self, domain, filter_key, filter_value, url):
|
||||||
requestor_id, brand = self._DOMAIN_MAP[domain]
|
requestor_id, brand, software_statement = self._DOMAIN_MAP[domain]
|
||||||
result = self._download_json(
|
result = self._download_json(
|
||||||
f'https://feeds.video.aetnd.com/api/v2/{brand}/videos',
|
f'https://feeds.video.aetnd.com/api/v2/{brand}/videos',
|
||||||
filter_value, query={f'filter[{filter_key}]': filter_value})
|
filter_value, query={f'filter[{filter_key}]': filter_value})
|
||||||
@@ -95,7 +96,7 @@ def _extract_aetn_info(self, domain, filter_key, filter_value, url):
|
|||||||
theplatform_metadata.get('AETN$PPL_pplProgramId') or theplatform_metadata.get('AETN$PPL_pplProgramId_OLD'),
|
theplatform_metadata.get('AETN$PPL_pplProgramId') or theplatform_metadata.get('AETN$PPL_pplProgramId_OLD'),
|
||||||
traverse_obj(theplatform_metadata, ('ratings', 0, 'rating')))
|
traverse_obj(theplatform_metadata, ('ratings', 0, 'rating')))
|
||||||
auth = self._extract_mvpd_auth(
|
auth = self._extract_mvpd_auth(
|
||||||
url, video_id, requestor_id, resource)
|
url, video_id, requestor_id, resource, software_statement)
|
||||||
info.update(self._extract_aen_smil(media_url, video_id, auth))
|
info.update(self._extract_aen_smil(media_url, video_id, auth))
|
||||||
info.update({
|
info.update({
|
||||||
'title': title,
|
'title': title,
|
||||||
@@ -110,11 +111,9 @@ class AENetworksIE(AENetworksBaseIE):
|
|||||||
IE_NAME = 'aenetworks'
|
IE_NAME = 'aenetworks'
|
||||||
IE_DESC = 'A+E Networks: A&E, Lifetime, History.com, FYI Network and History Vault'
|
IE_DESC = 'A+E Networks: A&E, Lifetime, History.com, FYI Network and History Vault'
|
||||||
_VALID_URL = AENetworksBaseIE._BASE_URL_REGEX + r'''(?P<id>
|
_VALID_URL = AENetworksBaseIE._BASE_URL_REGEX + r'''(?P<id>
|
||||||
shows/[^/]+/season-\d+/episode-\d+|
|
shows/[^/?#]+/season-\d+/episode-\d+|
|
||||||
(?:
|
(?P<type>movie|special)s/[^/?#]+(?P<extra>/[^/?#]+)?|
|
||||||
(?:movie|special)s/[^/]+|
|
(?:shows/[^/?#]+/)?videos/[^/?#]+
|
||||||
(?:shows/[^/]+/)?videos
|
|
||||||
)/[^/?#&]+
|
|
||||||
)'''
|
)'''
|
||||||
_TESTS = [{
|
_TESTS = [{
|
||||||
'url': 'http://www.history.com/shows/mountain-men/season-1/episode-1',
|
'url': 'http://www.history.com/shows/mountain-men/season-1/episode-1',
|
||||||
@@ -127,20 +126,18 @@ class AENetworksIE(AENetworksBaseIE):
|
|||||||
'upload_date': '20120529',
|
'upload_date': '20120529',
|
||||||
'uploader': 'AENE-NEW',
|
'uploader': 'AENE-NEW',
|
||||||
'duration': 2592.0,
|
'duration': 2592.0,
|
||||||
'thumbnail': r're:^https?://.*\.jpe?g$',
|
'thumbnail': r're:https?://.+/.+\.jpg',
|
||||||
'chapters': 'count:5',
|
'chapters': 'count:5',
|
||||||
'tags': 'count:14',
|
'tags': 'count:14',
|
||||||
'categories': ['Mountain Men'],
|
'categories': ['Mountain Men'],
|
||||||
'episode_number': 1,
|
'episode_number': 1,
|
||||||
'episode': 'Episode 1',
|
'episode': 'Winter Is Coming',
|
||||||
'season': 'Season 1',
|
'season': 'Season 1',
|
||||||
'season_number': 1,
|
'season_number': 1,
|
||||||
'series': 'Mountain Men',
|
'series': 'Mountain Men',
|
||||||
|
'age_limit': 0,
|
||||||
},
|
},
|
||||||
'params': {
|
'params': {'skip_download': 'm3u8'},
|
||||||
# m3u8 download
|
|
||||||
'skip_download': True,
|
|
||||||
},
|
|
||||||
'add_ie': ['ThePlatform'],
|
'add_ie': ['ThePlatform'],
|
||||||
'skip': 'Geo-restricted - This content is not available in your location.',
|
'skip': 'Geo-restricted - This content is not available in your location.',
|
||||||
}, {
|
}, {
|
||||||
@@ -154,21 +151,64 @@ class AENetworksIE(AENetworksBaseIE):
|
|||||||
'upload_date': '20160112',
|
'upload_date': '20160112',
|
||||||
'uploader': 'AENE-NEW',
|
'uploader': 'AENE-NEW',
|
||||||
'duration': 1277.695,
|
'duration': 1277.695,
|
||||||
'thumbnail': r're:^https?://.*\.jpe?g$',
|
'thumbnail': r're:https?://.+/.+\.jpg',
|
||||||
'chapters': 'count:4',
|
'chapters': 'count:4',
|
||||||
'tags': 'count:23',
|
'tags': 'count:23',
|
||||||
'episode': 'Episode 1',
|
'episode': 'Inlawful Entry',
|
||||||
'episode_number': 1,
|
'episode_number': 1,
|
||||||
'season': 'Season 9',
|
'season': 'Season 9',
|
||||||
'season_number': 9,
|
'season_number': 9,
|
||||||
'series': 'Duck Dynasty',
|
'series': 'Duck Dynasty',
|
||||||
|
'age_limit': 0,
|
||||||
},
|
},
|
||||||
'params': {
|
'params': {'skip_download': 'm3u8'},
|
||||||
# m3u8 download
|
'add_ie': ['ThePlatform'],
|
||||||
'skip_download': True,
|
}, {
|
||||||
},
|
'url': 'https://play.mylifetime.com/movies/v-c-andrews-web-of-dreams',
|
||||||
|
'info_dict': {
|
||||||
|
'id': '1590627395981',
|
||||||
|
'ext': 'mp4',
|
||||||
|
'title': 'VC Andrews\' Web of Dreams',
|
||||||
|
'description': 'md5:2a8ba13ae64271c79eb65c0577d312ce',
|
||||||
|
'uploader': 'AENE-NEW',
|
||||||
|
'age_limit': 14,
|
||||||
|
'duration': 5253.665,
|
||||||
|
'thumbnail': r're:https?://.+/.+\.jpg',
|
||||||
|
'chapters': 'count:8',
|
||||||
|
'tags': ['lifetime', 'mylifetime', 'lifetime channel', "VC Andrews' Web of Dreams"],
|
||||||
|
'series': '',
|
||||||
|
'season': 'Season 0',
|
||||||
|
'season_number': 0,
|
||||||
|
'episode': 'VC Andrews\' Web of Dreams',
|
||||||
|
'episode_number': 0,
|
||||||
|
'timestamp': 1566489703.0,
|
||||||
|
'upload_date': '20190822',
|
||||||
|
},
|
||||||
|
'params': {'skip_download': 'm3u8'},
|
||||||
|
'add_ie': ['ThePlatform'],
|
||||||
|
}, {
|
||||||
|
'url': 'https://www.aetv.com/specials/hunting-jonbenets-killer-the-untold-story',
|
||||||
|
'info_dict': {
|
||||||
|
'id': '1488235587551',
|
||||||
|
'ext': 'mp4',
|
||||||
|
'title': 'Hunting JonBenet\'s Killer: The Untold Story',
|
||||||
|
'description': 'md5:209869425ee392d74fe29201821e48b4',
|
||||||
|
'uploader': 'AENE-NEW',
|
||||||
|
'age_limit': 14,
|
||||||
|
'duration': 5003.903,
|
||||||
|
'thumbnail': r're:https?://.+/.+\.jpg',
|
||||||
|
'chapters': 'count:10',
|
||||||
|
'tags': 'count:11',
|
||||||
|
'series': '',
|
||||||
|
'season': 'Season 0',
|
||||||
|
'season_number': 0,
|
||||||
|
'episode': 'Hunting JonBenet\'s Killer: The Untold Story',
|
||||||
|
'episode_number': 0,
|
||||||
|
'timestamp': 1554987697.0,
|
||||||
|
'upload_date': '20190411',
|
||||||
|
},
|
||||||
|
'params': {'skip_download': 'm3u8'},
|
||||||
'add_ie': ['ThePlatform'],
|
'add_ie': ['ThePlatform'],
|
||||||
'skip': 'This video is only available for users of participating TV providers.',
|
|
||||||
}, {
|
}, {
|
||||||
'url': 'http://www.fyi.tv/shows/tiny-house-nation/season-1/episode-8',
|
'url': 'http://www.fyi.tv/shows/tiny-house-nation/season-1/episode-8',
|
||||||
'only_matching': True,
|
'only_matching': True,
|
||||||
@@ -196,25 +236,28 @@ class AENetworksIE(AENetworksBaseIE):
|
|||||||
}]
|
}]
|
||||||
|
|
||||||
def _real_extract(self, url):
|
def _real_extract(self, url):
|
||||||
domain, canonical = self._match_valid_url(url).groups()
|
domain, canonical, url_type, extra = self._match_valid_url(url).group('domain', 'id', 'type', 'extra')
|
||||||
|
if url_type in ('movie', 'special') and not extra:
|
||||||
|
canonical += f'/full-{url_type}'
|
||||||
return self._extract_aetn_info(domain, 'canonical', '/' + canonical, url)
|
return self._extract_aetn_info(domain, 'canonical', '/' + canonical, url)
|
||||||
|
|
||||||
|
|
||||||
class AENetworksListBaseIE(AENetworksBaseIE):
|
class AENetworksListBaseIE(AENetworksBaseIE):
|
||||||
def _call_api(self, resource, slug, brand, fields):
|
def _call_api(self, resource, slug, brand, fields):
|
||||||
return self._download_json(
|
return self._download_json(
|
||||||
'https://yoga.appsvcs.aetnd.com/graphql',
|
'https://yoga.appsvcs.aetnd.com/graphql', slug,
|
||||||
slug, query={'brand': brand}, data=urlencode_postdata({
|
query={'brand': brand}, headers={'Content-Type': 'application/json'},
|
||||||
|
data=json.dumps({
|
||||||
'query': '''{
|
'query': '''{
|
||||||
%s(slug: "%s") {
|
%s(slug: "%s") {
|
||||||
%s
|
%s
|
||||||
}
|
}
|
||||||
}''' % (resource, slug, fields), # noqa: UP031
|
}''' % (resource, slug, fields), # noqa: UP031
|
||||||
}))['data'][resource]
|
}).encode())['data'][resource]
|
||||||
|
|
||||||
def _real_extract(self, url):
|
def _real_extract(self, url):
|
||||||
domain, slug = self._match_valid_url(url).groups()
|
domain, slug = self._match_valid_url(url).groups()
|
||||||
_, brand = self._DOMAIN_MAP[domain]
|
_, brand, _ = self._DOMAIN_MAP[domain]
|
||||||
playlist = self._call_api(self._RESOURCE, slug, brand, self._FIELDS)
|
playlist = self._call_api(self._RESOURCE, slug, brand, self._FIELDS)
|
||||||
base_url = f'http://watch.{domain}'
|
base_url = f'http://watch.{domain}'
|
||||||
|
|
||||||
|
|||||||
@@ -1,32 +1,24 @@
|
|||||||
import re
|
from .brightcove import BrightcoveNewIE
|
||||||
|
from .common import InfoExtractor
|
||||||
from .theplatform import ThePlatformIE
|
from ..utils.traversal import traverse_obj
|
||||||
from ..utils import (
|
|
||||||
int_or_none,
|
|
||||||
parse_age_limit,
|
|
||||||
try_get,
|
|
||||||
update_url_query,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class AMCNetworksIE(ThePlatformIE): # XXX: Do not subclass from concrete IE
|
class AMCNetworksIE(InfoExtractor):
|
||||||
_VALID_URL = r'https?://(?:www\.)?(?P<site>amc|bbcamerica|ifc|(?:we|sundance)tv)\.com/(?P<id>(?:movies|shows(?:/[^/]+)+)/[^/?#&]+)'
|
_VALID_URL = r'https?://(?:www\.)?(?:amc|bbcamerica|ifc|(?:we|sundance)tv)\.com/(?P<id>(?:movies|shows(?:/[^/?#]+)+)/[^/?#&]+)'
|
||||||
_TESTS = [{
|
_TESTS = [{
|
||||||
'url': 'https://www.bbcamerica.com/shows/the-graham-norton-show/videos/tina-feys-adorable-airline-themed-family-dinner--51631',
|
'url': 'https://www.amc.com/shows/dark-winds/videos/dark-winds-a-look-at-season-3--1072027',
|
||||||
'info_dict': {
|
'info_dict': {
|
||||||
'id': '4Lq1dzOnZGt0',
|
'id': '6369261343112',
|
||||||
'ext': 'mp4',
|
'ext': 'mp4',
|
||||||
'title': "The Graham Norton Show - Season 28 - Tina Fey's Adorable Airline-Themed Family Dinner",
|
'title': 'Dark Winds: A Look at Season 3',
|
||||||
'description': "It turns out child stewardesses are very generous with the wine! All-new episodes of 'The Graham Norton Show' premiere Fridays at 11/10c on BBC America.",
|
'uploader_id': '6240731308001',
|
||||||
'upload_date': '20201120',
|
'duration': 176.427,
|
||||||
'timestamp': 1605904350,
|
'thumbnail': r're:https://[^/]+\.boltdns\.net/.+/image\.jpg',
|
||||||
'uploader': 'AMCN',
|
'tags': [],
|
||||||
|
'timestamp': 1740414792,
|
||||||
|
'upload_date': '20250224',
|
||||||
},
|
},
|
||||||
'params': {
|
'params': {'skip_download': 'm3u8'},
|
||||||
# m3u8 download
|
|
||||||
'skip_download': True,
|
|
||||||
},
|
|
||||||
'skip': '404 Not Found',
|
|
||||||
}, {
|
}, {
|
||||||
'url': 'http://www.bbcamerica.com/shows/the-hunt/full-episodes/season-1/episode-01-the-hardest-challenge',
|
'url': 'http://www.bbcamerica.com/shows/the-hunt/full-episodes/season-1/episode-01-the-hardest-challenge',
|
||||||
'only_matching': True,
|
'only_matching': True,
|
||||||
@@ -52,96 +44,18 @@ class AMCNetworksIE(ThePlatformIE): # XXX: Do not subclass from concrete IE
|
|||||||
'url': 'https://www.sundancetv.com/shows/riviera/full-episodes/season-1/episode-01-episode-1',
|
'url': 'https://www.sundancetv.com/shows/riviera/full-episodes/season-1/episode-01-episode-1',
|
||||||
'only_matching': True,
|
'only_matching': True,
|
||||||
}]
|
}]
|
||||||
_REQUESTOR_ID_MAP = {
|
|
||||||
'amc': 'AMC',
|
|
||||||
'bbcamerica': 'BBCA',
|
|
||||||
'ifc': 'IFC',
|
|
||||||
'sundancetv': 'SUNDANCE',
|
|
||||||
'wetv': 'WETV',
|
|
||||||
}
|
|
||||||
|
|
||||||
def _real_extract(self, url):
|
def _real_extract(self, url):
|
||||||
site, display_id = self._match_valid_url(url).groups()
|
display_id = self._match_id(url)
|
||||||
requestor_id = self._REQUESTOR_ID_MAP[site]
|
webpage = self._download_webpage(url, display_id)
|
||||||
page_data = self._download_json(
|
initial_data = self._search_json(
|
||||||
f'https://content-delivery-gw.svc.ds.amcn.com/api/v2/content/amcn/{requestor_id.lower()}/url/{display_id}',
|
r'window\.initialData\s*=\s*JSON\.parse\(String\.raw`', webpage, 'initial data', display_id)
|
||||||
display_id)['data']
|
video_id = traverse_obj(initial_data, ('initialData', 'properties', 'videoId', {str}))
|
||||||
properties = page_data.get('properties') or {}
|
if not video_id: # All locked videos are now DRM-protected
|
||||||
query = {
|
self.report_drm(display_id)
|
||||||
'mbr': 'true',
|
account_id = initial_data['config']['brightcove']['accountId']
|
||||||
'manifest': 'm3u',
|
player_id = initial_data['config']['brightcove']['playerId']
|
||||||
}
|
|
||||||
|
|
||||||
video_player_count = 0
|
return self.url_result(
|
||||||
try:
|
f'https://players.brightcove.net/{account_id}/{player_id}_default/index.html?videoId={video_id}',
|
||||||
for v in page_data['children']:
|
BrightcoveNewIE, video_id)
|
||||||
if v.get('type') == 'video-player':
|
|
||||||
release_pid = v['properties']['currentVideo']['meta']['releasePid']
|
|
||||||
tp_path = 'M_UwQC/' + release_pid
|
|
||||||
media_url = 'https://link.theplatform.com/s/' + tp_path
|
|
||||||
video_player_count += 1
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
if video_player_count > 1:
|
|
||||||
self.report_warning(
|
|
||||||
f'The JSON data has {video_player_count} video players. Only one will be extracted')
|
|
||||||
|
|
||||||
# Fall back to videoPid if releasePid not found.
|
|
||||||
# TODO: Fall back to videoPid if releasePid manifest uses DRM.
|
|
||||||
if not video_player_count:
|
|
||||||
tp_path = 'M_UwQC/media/' + properties['videoPid']
|
|
||||||
media_url = 'https://link.theplatform.com/s/' + tp_path
|
|
||||||
|
|
||||||
theplatform_metadata = self._download_theplatform_metadata(tp_path, display_id)
|
|
||||||
info = self._parse_theplatform_metadata(theplatform_metadata)
|
|
||||||
video_id = theplatform_metadata['pid']
|
|
||||||
title = theplatform_metadata['title']
|
|
||||||
rating = try_get(
|
|
||||||
theplatform_metadata, lambda x: x['ratings'][0]['rating'])
|
|
||||||
video_category = properties.get('videoCategory')
|
|
||||||
if video_category and video_category.endswith('-Auth'):
|
|
||||||
resource = self._get_mvpd_resource(
|
|
||||||
requestor_id, title, video_id, rating)
|
|
||||||
query['auth'] = self._extract_mvpd_auth(
|
|
||||||
url, video_id, requestor_id, resource)
|
|
||||||
media_url = update_url_query(media_url, query)
|
|
||||||
formats, subtitles = self._extract_theplatform_smil(
|
|
||||||
media_url, video_id)
|
|
||||||
|
|
||||||
thumbnails = []
|
|
||||||
thumbnail_urls = [properties.get('imageDesktop')]
|
|
||||||
if 'thumbnail' in info:
|
|
||||||
thumbnail_urls.append(info.pop('thumbnail'))
|
|
||||||
for thumbnail_url in thumbnail_urls:
|
|
||||||
if not thumbnail_url:
|
|
||||||
continue
|
|
||||||
mobj = re.search(r'(\d+)x(\d+)', thumbnail_url)
|
|
||||||
thumbnails.append({
|
|
||||||
'url': thumbnail_url,
|
|
||||||
'width': int(mobj.group(1)) if mobj else None,
|
|
||||||
'height': int(mobj.group(2)) if mobj else None,
|
|
||||||
})
|
|
||||||
|
|
||||||
info.update({
|
|
||||||
'age_limit': parse_age_limit(rating),
|
|
||||||
'formats': formats,
|
|
||||||
'id': video_id,
|
|
||||||
'subtitles': subtitles,
|
|
||||||
'thumbnails': thumbnails,
|
|
||||||
})
|
|
||||||
ns_keys = theplatform_metadata.get('$xmlns', {}).keys()
|
|
||||||
if ns_keys:
|
|
||||||
ns = next(iter(ns_keys))
|
|
||||||
episode = theplatform_metadata.get(ns + '$episodeTitle') or None
|
|
||||||
episode_number = int_or_none(
|
|
||||||
theplatform_metadata.get(ns + '$episode'))
|
|
||||||
season_number = int_or_none(
|
|
||||||
theplatform_metadata.get(ns + '$season'))
|
|
||||||
series = theplatform_metadata.get(ns + '$show') or None
|
|
||||||
info.update({
|
|
||||||
'episode': episode,
|
|
||||||
'episode_number': episode_number,
|
|
||||||
'season_number': season_number,
|
|
||||||
'series': series,
|
|
||||||
})
|
|
||||||
return info
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user