Mirror of https://github.com/yt-dlp/yt-dlp (synced 2025-12-16 22:25:40 +07:00)

Compare commits: 974 commits, 2024.07.16...017d76edcf
[Commit listing: the Author / SHA1 / Date table for these commits was captured without its author, date, and message fields; only abbreviated hashes survive, from 017d76edcf at the top of the range down to e046db8a11.]
.github/FUNDING.yml (2 changes)

@@ -10,4 +10,4 @@ liberapay: # Replace with a single Liberapay username
 issuehunt: # Replace with a single IssueHunt username
 otechie: # Replace with a single Otechie username
 
-custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators']
+custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers']
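Applied to the file, the funding block ends up as below; this is simply the hunk's context plus its added line, shown as it would read in place:

```yaml
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username

custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers']
```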
.github/ISSUE_TEMPLATE/1_broken_site.yml (31 changes)

@@ -2,13 +2,11 @@ name: Broken site support
 description: Report issue with yt-dlp on a supported site
 labels: [triage, site-bug]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -24,9 +22,9 @@ body:
           required: true
         - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
@@ -47,6 +45,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -63,14 +63,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
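The checkbox-to-markdown swap in the first hunk recurs in every template diff below. Assembled from the added lines (indentation as in the hunk), the new header element reads:

```yaml
# New template header: a plain markdown notice replaces the old
# "DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE" acknowledgement checkbox
- type: markdown
  attributes:
    value: |
      > [!IMPORTANT]
      > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
```

In GitHub issue forms a `markdown` element renders as static text, so filers see the warning as a callout at the top of the form instead of having to tick an acknowledgement box.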
.github/ISSUE_TEMPLATE/2_site_support_request.yml

@@ -2,13 +2,11 @@ name: Site support request
 description: Request support for a new site
 labels: [triage, site-request]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -24,9 +22,9 @@ body:
           required: true
         - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
@@ -59,6 +57,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -75,14 +75,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/3_site_feature_request.yml

@@ -1,14 +1,12 @@
 name: Site feature request
-description: Request a new functionality for a supported site
+description: Request new functionality for a site supported by yt-dlp
 labels: [triage, site-enhancement]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -22,9 +20,9 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
@@ -55,6 +53,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -71,14 +71,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/4_bug_report.yml (35 changes)

@@ -2,13 +2,11 @@ name: Core bug report
 description: Report a bug unrelated to any particular site or extractor
 labels: [triage, bug]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -20,13 +18,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
-          required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
@@ -40,6 +34,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -56,14 +52,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/5_feature_request.yml (33 changes)

@@ -1,14 +1,12 @@
 name: Feature request
-description: Request a new functionality unrelated to any particular site or extractor
+description: Request a new feature unrelated to any particular site or extractor
 labels: [triage, enhancement]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -22,9 +20,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
@@ -38,6 +36,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
      options:
        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
@@ -52,14 +52,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
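Every template gains the same `description` under its verbose-output section. Pieced together from the hunks' context and added lines, the element now reads roughly as below; the `- type: checkboxes` line itself sits just above the excerpted context, so it is inferred here rather than shown in the diffs:

```yaml
# Verbose-output section of the issue forms after the change;
# the trailing "If using API" option appears in the feature-request
# and question templates, per their hunks.
- type: checkboxes
  id: verbose
  attributes:
    label: Provide verbose output that clearly demonstrates the problem
    description: |
      This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
    options:
      - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
        required: true
      - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
```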
.github/ISSUE_TEMPLATE/6_question.yml (33 changes)

@@ -1,14 +1,12 @@
 name: Ask question
-description: Ask yt-dlp related question
+description: Ask a question about using yt-dlp
 labels: [question]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: markdown
     attributes:
       value: |
@@ -28,9 +26,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: question
@@ -44,6 +42,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
      options:
        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
@@ -58,14 +58,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/config.yml (7 changes)

@@ -1,8 +1,5 @@
 blank_issues_enabled: false
 contact_links:
-  - name: Get help from the community on Discord
+  - name: Get help on Discord
     url: https://discord.gg/H5MNcFW63r
-    about: Join the yt-dlp Discord for community-powered support!
-  - name: Matrix Bridge to the Discord server
-    url: https://matrix.to/#/#yt-dlp:matrix.org
-    about: For those who do not want to use Discord
+    about: Join the yt-dlp Discord server for support and discussion
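The hunk spans the entire file, so after the change it reads as follows; the Matrix bridge link is gone and only the Discord entry remains:

```yaml
blank_issues_enabled: false
contact_links:
  - name: Get help on Discord
    url: https://discord.gg/H5MNcFW63r
    about: Join the yt-dlp Discord server for support and discussion
```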
@@ -18,9 +18,9 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
|
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
|
||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||||
required: true
|
required: true
|
||||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||||
- type: input
|
- type: input
|
||||||
|
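A note on the doubled percent signs above: the files under .github/ISSUE_TEMPLATE_tmpl/ are rendered through Python's printf-style formatting, so a literal `%` in a URL must be written as `%%`, and `%(no_skip)s` is a named placeholder. A minimal illustration (the substitution value here is invented for the example):

```python
# The *_tmpl issue templates are %-formatted: '%%' collapses to '%' and
# '%(no_skip)s' is a named placeholder (substitution value invented here).
template = (
    'I have searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues'
    '?q=is%%3Aissue%%20-label%%3Aspam%%20%%20)\n'
    '%(no_skip)s'
)
print(template % {'no_skip': '# (skip-check block would be inserted here)'})
```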
@@ -18,9 +18,9 @@ body:
           required: true
         - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
@@ -1,5 +1,5 @@
 name: Site feature request
-description: Request a new functionality for a supported site
+description: Request new functionality for a site supported by yt-dlp
 labels: [triage, site-enhancement]
 body:
   %(no_skip)s
@@ -16,9 +16,9 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (8 changes)
@@ -14,13 +14,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
-          required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
@@ -1,5 +1,5 @@
 name: Feature request
-description: Request a new functionality unrelated to any particular site or extractor
+description: Request a new feature unrelated to any particular site or extractor
 labels: [triage, enhancement]
 body:
   %(no_skip)s
@@ -16,9 +16,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
.github/ISSUE_TEMPLATE_tmpl/6_question.yml (6 changes)
@@ -1,5 +1,5 @@
 name: Ask question
-description: Ask yt-dlp related question
+description: Ask a question about using yt-dlp
 labels: [question]
 body:
   %(no_skip)s
@@ -22,9 +22,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: question
.github/PULL_REQUEST_TEMPLATE.md (30 changes)
@@ -1,14 +1,17 @@
+<!--
 **IMPORTANT**: PRs without the template will be CLOSED

+Due to the high volume of pull requests, it may be a while before your PR is reviewed.
+Please try to keep your pull request focused on a single bugfix or new feature.
+Pull requests with a vast scope and/or very large diff will take much longer to review.
+It is recommended for new contributors to stick to smaller pull requests, so you can receive much more immediate feedback as you familiarize yourself with the codebase.
+
+PLEASE AVOID FORCE-PUSHING after opening a PR, as it makes reviewing more difficult.
+-->
+
 ### Description of your *pull request* and other information

-<!--
-Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible
--->
-
-ADD DESCRIPTION HERE
+ADD DETAILED DESCRIPTION HERE

 Fixes #

@@ -16,24 +19,23 @@ ### Description of your *pull request* and other information
 <details open><summary>Template</summary> <!-- OPEN is intentional -->

 <!--

 # PLEASE FOLLOW THE GUIDE BELOW

 - You will be asked some questions, please read them **carefully** and answer honestly
 - Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
-- Use *Preview* tab to see how your *pull request* will actually look like
+- Use *Preview* tab to see what your *pull request* will actually look like

 -->

 ### Before submitting a *pull request* make sure you have:
 - [ ] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions)
 - [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests

-### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply:
-- [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
-- [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
+### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
+- [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
+- [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
+- [ ] I have read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated

-### What is the purpose of your *pull request*?
+### What is the purpose of your *pull request*? Check those that apply and remove the others:
 - [ ] Fix or improvement to an extractor (Make sure to add/update tests)
 - [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy))
 - [ ] Core bug fix/improvement
.github/actionlint.yml (new file, 22 lines)
@@ -0,0 +1,22 @@
+config-variables:
+  - KEEP_CACHE_WARM
+  - PUSH_VERSION_COMMIT
+  - UPDATE_TO_VERIFICATION
+  - PYPI_PROJECT
+  - PYPI_SUFFIX
+  - NIGHTLY_PYPI_PROJECT
+  - NIGHTLY_PYPI_SUFFIX
+  - NIGHTLY_ARCHIVE_REPO
+  - BUILD_NIGHTLY
+  - MASTER_PYPI_PROJECT
+  - MASTER_PYPI_SUFFIX
+  - MASTER_ARCHIVE_REPO
+  - BUILD_MASTER
+  - ISSUE_LOCKDOWN
+  - SANITIZE_COMMENT
+
+paths:
+  .github/workflows/build.yml:
+    ignore:
+      # SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
+      - '.+SC1090.+'
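For context, `config-variables` tells actionlint which repository-level `vars.*` names are legitimate, so that typos in workflow expressions get flagged. A hypothetical self-check of that list (the script, paths, and simplified regexes here are illustrative, not part of the repo):

```python
# Hypothetical consistency check: every ${{ vars.NAME }} referenced in the
# workflows should be declared in .github/actionlint.yml (regexes simplified).
import pathlib
import re

declared = set(re.findall(r'^\s+-\s+([A-Z_]+)\s*$',
                          pathlib.Path('.github/actionlint.yml').read_text(), re.M))
used = set()
for wf in pathlib.Path('.github/workflows').glob('*.yml'):
    used |= set(re.findall(r'vars\.([A-Z_]+)', wf.read_text()))
print('used but not declared:', sorted(used - declared))
```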
.github/workflows/build.yml (627 changes)
@@ -9,31 +9,27 @@ on:
         required: false
         default: stable
         type: string
+      origin:
+        required: true
+        type: string
       unix:
         default: true
         type: boolean
-      linux_static:
+      linux:
         default: true
         type: boolean
-      linux_arm:
+      linux_armv7l:
+        default: true
+        type: boolean
+      musllinux:
         default: true
         type: boolean
       macos:
         default: true
         type: boolean
-      macos_legacy:
-        default: true
-        type: boolean
       windows:
         default: true
         type: boolean
-      windows32:
-        default: true
-        type: boolean
-      origin:
-        required: false
-        default: ''
-        type: string
     secrets:
       GPG_SIGNING_KEY:
         required: false
@@ -43,7 +39,9 @@ on:
       version:
         description: |
           VERSION: yyyy.mm.dd[.rev] or rev
-        required: true
+          (default: auto-generated)
+        required: false
+        default: ''
         type: string
       channel:
         description: |
@@ -55,37 +53,26 @@ on:
         description: yt-dlp, yt-dlp.tar.gz
         default: true
         type: boolean
-      linux_static:
-        description: yt-dlp_linux
+      linux:
+        description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
         default: true
         type: boolean
-      linux_arm:
-        description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
+      linux_armv7l:
+        description: yt-dlp_linux_armv7l.zip
+        default: true
+        type: boolean
+      musllinux:
+        description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
         default: true
         type: boolean
       macos:
         description: yt-dlp_macos, yt-dlp_macos.zip
         default: true
         type: boolean
-      macos_legacy:
-        description: yt-dlp_macos_legacy
-        default: true
-        type: boolean
       windows:
-        description: yt-dlp.exe, yt-dlp_min.exe, yt-dlp_win.zip
+        description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
         default: true
         type: boolean
-      windows32:
-        description: yt-dlp_x86.exe
-        default: true
-        type: boolean
-      origin:
-        description: Origin
-        required: false
-        default: 'current repo'
-        type: choice
-        options:
-          - 'current repo'

 permissions:
   contents: read
@@ -94,44 +81,153 @@ jobs:
   process:
     runs-on: ubuntu-latest
     outputs:
-      origin: ${{ steps.process_origin.outputs.origin }}
+      origin: ${{ steps.process_inputs.outputs.origin }}
+      timestamp: ${{ steps.process_inputs.outputs.timestamp }}
+      version: ${{ steps.process_inputs.outputs.version }}
+      linux_matrix: ${{ steps.linux_matrix.outputs.matrix }}
     steps:
-      - name: Process origin
-        id: process_origin
+      - name: Process inputs
+        id: process_inputs
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
+          REPOSITORY: ${{ github.repository }}
+        shell: python
         run: |
-          echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
+          import datetime as dt
+          import json
+          import os
+          import re
+          INPUTS = json.loads(os.environ['INPUTS'])
+          timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
+          version = INPUTS.get('version')
+          if version and '.' not in version:
+              # build.yml was dispatched with only a revision as the version input value
+              version_parts = [*timestamp.split('.')[:3], version]
+          elif not version:
+              # build.yml was dispatched without any version input value, so include .HHMMSS revision
+              version_parts = timestamp.split('.')[:4]
+          else:
+              # build.yml was called or dispatched with a complete version input value
+              version_parts = version.split('.')
+          assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
+          outputs = {
+              'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
+              'timestamp': timestamp,
+              'version': '.'.join(version_parts),
+          }
+          print(json.dumps(outputs, indent=2))
+          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
+              f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
+
+      - name: Build Linux matrix
+        id: linux_matrix
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
+          PYTHON_VERSION: '3.13'
+          UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+        shell: python
+        run: |
+          import json
+          import os
+          EXE_MAP = {
+              'linux': [{
+                  'os': 'linux',
+                  'arch': 'x86_64',
+                  'runner': 'ubuntu-24.04',
+              }, {
+                  'os': 'linux',
+                  'arch': 'aarch64',
+                  'runner': 'ubuntu-24.04-arm',
+              }],
+              'linux_armv7l': [{
+                  'os': 'linux',
+                  'arch': 'armv7l',
+                  'runner': 'ubuntu-24.04-arm',
+                  'qemu_platform': 'linux/arm/v7',
+                  'onefile': False,
+                  'cache_requirements': True,
+                  'update_to': 'yt-dlp/yt-dlp@2023.03.04',
+              }],
+              'musllinux': [{
+                  'os': 'musllinux',
+                  'arch': 'x86_64',
+                  'runner': 'ubuntu-24.04',
+                  'python_version': '3.14',
+              }, {
+                  'os': 'musllinux',
+                  'arch': 'aarch64',
+                  'runner': 'ubuntu-24.04-arm',
+                  'python_version': '3.14',
+              }],
+          }
+          INPUTS = json.loads(os.environ['INPUTS'])
+          matrix = [exe for key, group in EXE_MAP.items() for exe in group if INPUTS.get(key)]
+          if not matrix:
+              # If we send an empty matrix when no linux inputs are given, the entire workflow fails
+              matrix = [EXE_MAP['linux'][0]]
+          for exe in matrix:
+              exe['exe'] = '_'.join(filter(None, (
+                  'yt-dlp',
+                  exe['os'],
+                  exe['arch'] != 'x86_64' and exe['arch'],
+              )))
+              exe.setdefault('qemu_platform', None)
+              exe.setdefault('onefile', True)
+              exe.setdefault('onedir', True)
+              exe.setdefault('cache_requirements', False)
+              exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
+              exe.setdefault('update_to', os.environ['UPDATE_TO'])
+          if not any(INPUTS.get(key) for key in EXE_MAP):
+              print('skipping linux job')
+          else:
+              print(json.dumps(matrix, indent=2))
+              with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
+                  f.write(f'matrix={json.dumps(matrix)}')

   unix:
     needs: process
     if: inputs.unix
     runs-on: ubuntu-latest
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
        with:
          fetch-depth: 0 # Needed for changelog
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"
       - name: Install Requirements
         run: |
           sudo apt -y install zip pandoc man sed
       - name: Prepare
         run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           python devscripts/make_lazy_extractors.py
       - name: Build Unix platform-independent binary
         run: |
-          make all tar
+          make all-extra tar
       - name: Verify --update-to
         if: vars.UPDATE_TO_VERIFICATION
         run: |
           chmod +x ./yt-dlp
           cp ./yt-dlp ./yt-dlp_downgraded
           version="$(./yt-dlp --version)"
-          ./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+          ./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
           downgraded_version="$(./yt-dlp_downgraded --version)"
-          [[ "$version" != "$downgraded_version" ]]
+          [[ "${version}" != "${downgraded_version}" ]]
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
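To make the new version-normalization step concrete, here is a hedged, standalone rerun of its three branches (the timestamp is an example value in the same format the workflow generates):

```python
import datetime as dt

# Example timestamp in the format produced by the "Process inputs" step above
timestamp = dt.datetime(2025, 11, 29, 5, 43, 25, 123456,
                        tzinfo=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')

def normalize(version, timestamp):
    # Same three branches as the workflow script above
    if version and '.' not in version:
        return '.'.join([*timestamp.split('.')[:3], version])  # revision only
    if not version:
        return '.'.join(timestamp.split('.')[:4])              # fully auto-generated
    return version                                             # complete version given

print(normalize('', timestamp))            # -> 2025.11.29.054325
print(normalize('2', timestamp))           # -> 2025.11.29.2
print(normalize('2025.11.29', timestamp))  # -> 2025.11.29
```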
@@ -141,97 +237,74 @@ jobs:
           yt-dlp.tar.gz
           compression-level: 0

-  linux_static:
+  linux:
+    name: ${{ matrix.os }} (${{ matrix.arch }})
+    if: inputs.linux || inputs.linux_armv7l || inputs.musllinux
     needs: process
-    if: inputs.linux_static
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Build static executable
-        env:
-          channel: ${{ inputs.channel }}
-          origin: ${{ needs.process.outputs.origin }}
-          version: ${{ inputs.version }}
-        run: |
-          mkdir ~/build
-          cd bundle/docker
-          docker compose up --build static
-          sudo chown "${USER}:docker" ~/build/yt-dlp_linux
-      - name: Verify --update-to
-        if: vars.UPDATE_TO_VERIFICATION
-        run: |
-          chmod +x ~/build/yt-dlp_linux
-          cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
-          version="$(~/build/yt-dlp_linux --version)"
-          ~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
-          downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
-          [[ "$version" != "$downgraded_version" ]]
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: build-bin-${{ github.job }}
-          path: |
-            ~/build/yt-dlp_linux
-          compression-level: 0
-
-  linux_arm:
-    needs: process
-    if: inputs.linux_arm
-    permissions:
-      contents: read
-      packages: write # for creating cache
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.runner }}
     strategy:
+      fail-fast: false
       matrix:
-        architecture:
-          - armv7
-          - aarch64
+        include: ${{ fromJSON(needs.process.outputs.linux_matrix) }}
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      EXE_NAME: ${{ matrix.exe }}
+      PYTHON_VERSION: ${{ matrix.python_version }}
+      UPDATE_TO: ${{ (vars.UPDATE_TO_VERIFICATION && matrix.update_to) || '' }}
+      SKIP_ONEDIR_BUILD: ${{ (!matrix.onedir && '1') || '' }}
+      SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}
     steps:
-      - uses: actions/checkout@v4
-        with:
-          path: ./repo
-      - name: Virtualized Install, Prepare & Build
-        uses: yt-dlp/run-on-arch-action@v2
-        with:
-          # Ref: https://github.com/uraimo/run-on-arch-action/issues/55
-          env: |
-            GITHUB_WORKFLOW: build
-          githubToken: ${{ github.token }} # To cache image
-          arch: ${{ matrix.architecture }}
-          distro: ubuntu18.04 # Standalone executable should be built on minimum supported OS
-          dockerRunArgs: --volume "${PWD}/repo:/repo"
-          install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
-            apt update
-            apt -y install zlib1g-dev libffi-dev python3.8 python3.8-dev python3.8-distutils python3-pip
-            python3.8 -m pip install -U pip setuptools wheel
-            # Cannot access any files from the repo directory at this stage
-            python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi secretstorage cffi
+      - uses: actions/checkout@v5
+      - name: Cache requirements
+        if: matrix.cache_requirements
+        id: cache-venv
+        uses: actions/cache@v4
+        env:
+          SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
+        with:
+          path: |
+            venv
+          key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
+            cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-
+      - name: Set up QEMU
+        if: matrix.qemu_platform
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: ${{ matrix.qemu_platform }}
+      - name: Build executable
+        env:
+          SERVICE: ${{ matrix.os }}_${{ matrix.arch }}
         run: |
-          cd repo
-          python3.8 devscripts/install_deps.py -o --include build
-          python3.8 devscripts/install_deps.py --include pyinstaller --include secretstorage # Cached version may be out of date
-          python3.8 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
-          python3.8 devscripts/make_lazy_extractors.py
-          python3.8 -m bundle.pyinstaller
-
-          if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
-            arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
-            chmod +x ./dist/yt-dlp_linux_${arch}
-            cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
-            version="$(./dist/yt-dlp_linux_${arch} --version)"
-            ./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
-            downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
-            [[ "$version" != "$downgraded_version" ]]
-          fi
+          mkdir -p ./venv
+          mkdir -p ./dist
+          pushd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+          popd
+          if [[ -z "${SKIP_ONEFILE_BUILD}" ]]; then
+            sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
+          fi
+      - name: Verify executable in container
+        env:
+          SERVICE: ${{ matrix.os }}_${{ matrix.arch }}_verify
+        run: |
+          cd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: build-bin-linux_${{ matrix.architecture }}
-          path: | # run-on-arch-action designates armv7l as armv7
-            repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
+          name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
+          path: |
+            dist/${{ matrix.exe }}*
           compression-level: 0

   macos:
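The `matrix.exe` values consumed by the upload step come from the `'_'.join(filter(None, ...))` expression in the matrix script earlier in this diff; a standalone rerun of that naming rule:

```python
def exe_name(os_, arch):
    # x86_64 is the implied default, so only non-default arches get a suffix,
    # mirroring the matrix script above
    return '_'.join(filter(None, ('yt-dlp', os_, arch != 'x86_64' and arch)))

assert exe_name('linux', 'x86_64') == 'yt-dlp_linux'
assert exe_name('linux', 'armv7l') == 'yt-dlp_linux_armv7l'
assert exe_name('musllinux', 'aarch64') == 'yt-dlp_musllinux_aarch64'
```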
@@ -239,36 +312,45 @@ jobs:
     if: inputs.macos
     permissions:
       contents: read
-      actions: write # For cleaning up cache
-    runs-on: macos-12
+    runs-on: macos-14
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       # NB: Building universal2 does not work with python from actions/setup-python
-      - name: Restore cached requirements
-        id: restore-cache
-        uses: actions/cache/restore@v4
+      - name: Cache requirements
+        id: cache-venv
+        uses: actions/cache@v4
         env:
           SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
         with:
           path: |
             ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}
+          key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            cache-reqs-${{ github.job }}-${{ github.ref }}-
+            cache-reqs-${{ github.job }}-
       - name: Install Requirements
         run: |
           brew install coreutils
+          # We need to use system Python in order to roll our own universal2 curl_cffi wheel
+          brew uninstall --ignore-dependencies python3
           python3 -m venv ~/yt-dlp-build-venv
           source ~/yt-dlp-build-venv/bin/activate
-          python3 devscripts/install_deps.py -o --include build
-          python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
+          python3 devscripts/install_deps.py --only-optional-groups --include-group build
+          python3 devscripts/install_deps.py --print --include-group pyinstaller > requirements.txt
           # We need to ignore wheels otherwise we break universal2 builds
           python3 -m pip install -U --no-binary :all: -r requirements.txt
           # We need to fuse our own universal2 wheels for curl_cffi
-          python3 -m pip install -U delocate
+          python3 -m pip install -U 'delocate==0.11.0'
           mkdir curl_cffi_whls curl_cffi_universal2
-          python3 devscripts/install_deps.py --print -o --include curl-cffi > requirements.txt
+          python3 devscripts/install_deps.py --print --only-optional-groups --include-group curl-cffi > requirements.txt
           for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
             python3 -m pip download \
               --only-binary=:all: \
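The `pip download` loop is cut off by the diff context above; its shape is roughly the following (the flags beyond `--only-binary` are assumptions, not shown in the hunk):

```python
# Rough sketch of the per-platform wheel download feeding the universal2 fuse
# step; flags other than --only-binary are assumed, not shown in the hunk.
import subprocess

for platform in ('macosx_11_0_arm64', 'macosx_11_0_x86_64'):
    subprocess.run(
        ['python3', '-m', 'pip', 'download',
         '--only-binary=:all:',
         '--platform', platform,
         '--dest', 'curl_cffi_whls',
         '-r', 'requirements.txt'],
        check=True)  # one wheel per architecture, fused into universal2 afterwards
```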
@@ -297,7 +379,7 @@ jobs:
       - name: Prepare
         run: |
-          python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
           python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
@@ -312,7 +394,7 @@ jobs:
           chmod +x ./dist/yt-dlp_macos
           cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
           version="$(./dist/yt-dlp_macos --version)"
-          ./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+          ./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
           downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
           [[ "$version" != "$downgraded_version" ]]
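All of the `Verify --update-to` steps follow the same pattern: downgrade a copy of the freshly built binary to a pinned release, then assert that the two reported versions differ. A sketch of the same invariant in Python (paths as in the macOS job above):

```python
# Same invariant as the shell snippet above: after --update-to, the downgraded
# copy must report a different version than the fresh build.
import subprocess

def version_of(exe):
    return subprocess.run([exe, '--version'], capture_output=True,
                          text=True, check=True).stdout.strip()

subprocess.run(['./dist/yt-dlp_macos_downgraded', '-v',
                '--update-to', 'yt-dlp/yt-dlp@2025.09.05'], check=True)
assert version_of('./dist/yt-dlp_macos') != version_of('./dist/yt-dlp_macos_downgraded')
```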
@@ -325,179 +407,124 @@ jobs:
           dist/yt-dlp_macos.zip
           compression-level: 0

-      - name: Cleanup cache
-        if: steps.restore-cache.outputs.cache-hit == 'true'
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          cache_key: cache-reqs-${{ github.job }}
-          repository: ${{ github.repository }}
-          branch: ${{ github.ref }}
-        run: |
-          gh extension install actions/gh-actions-cache
-          gh actions-cache delete "${cache_key}" -R "${repository}" -B "${branch}" --confirm
-
-      - name: Cache requirements
-        uses: actions/cache/save@v4
-        with:
-          path: |
-            ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}
-
-  macos_legacy:
-    needs: process
-    if: inputs.macos_legacy
-    runs-on: macos-12
-    steps:
-      - uses: actions/checkout@v4
-      - name: Install Python
-        # We need the official Python, because the GA ones only support newer macOS versions
-        env:
-          PYTHON_VERSION: 3.10.5
-          MACOSX_DEPLOYMENT_TARGET: 10.9 # Used up by the Python build tools
-        run: |
-          # Hack to get the latest patch version. Uncomment if needed
-          #brew install python@3.10
-          #export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
-          curl "https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg" -o "python.pkg"
-          sudo installer -pkg python.pkg -target /
-          python3 --version
-      - name: Install Requirements
-        run: |
-          brew install coreutils
-          python3 devscripts/install_deps.py --user -o --include build
-          python3 devscripts/install_deps.py --user --include pyinstaller
-      - name: Prepare
-        run: |
-          python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
-          python3 devscripts/make_lazy_extractors.py
-      - name: Build
-        run: |
-          python3 -m bundle.pyinstaller
-          mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
-      - name: Verify --update-to
-        if: vars.UPDATE_TO_VERIFICATION
-        run: |
-          chmod +x ./dist/yt-dlp_macos_legacy
-          cp ./dist/yt-dlp_macos_legacy ./dist/yt-dlp_macos_legacy_downgraded
-          version="$(./dist/yt-dlp_macos_legacy --version)"
-          ./dist/yt-dlp_macos_legacy_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
-          downgraded_version="$(./dist/yt-dlp_macos_legacy_downgraded --version)"
-          [[ "$version" != "$downgraded_version" ]]
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: build-bin-${{ github.job }}
-          path: |
-            dist/yt-dlp_macos_legacy
-          compression-level: 0
-
   windows:
+    name: windows (${{ matrix.arch }})
     needs: process
     if: inputs.windows
-    runs-on: windows-latest
+    permissions:
+      contents: read
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - arch: 'x64'
+            runner: windows-2025
+            python_version: '3.10'
+            platform_tag: win_amd64
+            pyi_version: '6.17.0'
+            pyi_tag: '2025.11.29.054325'
+            pyi_hash: e28cc13e4ad0cc74330d832202806d0c1976e9165da6047309348ca663c0ed3d
+          - arch: 'x86'
+            runner: windows-2025
+            python_version: '3.10'
+            platform_tag: win32
+            pyi_version: '6.17.0'
+            pyi_tag: '2025.11.29.054325'
+            pyi_hash: c00f600c17de3bdd589f043f60ab64fc34fcba6dd902ad973af9c8afc74f80d1
+          - arch: 'arm64'
+            runner: windows-11-arm
+            python_version: '3.13' # arm64 only has Python >= 3.11 available
+            platform_tag: win_arm64
+            pyi_version: '6.17.0'
+            pyi_tag: '2025.11.29.054325'
+            pyi_hash: a2033b18b4f7bc6108b5fd76a92c6c1de0a12ec4fe98a23396a9f978cb4b7d7b
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      SUFFIX: ${{ (matrix.arch != 'x64' && format('_{0}', matrix.arch)) || '' }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+      BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
+      PYI_REPO: https://github.com/yt-dlp/Pyinstaller-Builds
+      PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with: # 3.8 is used for Win7 support
-          python-version: "3.8"
+      - uses: actions/checkout@v5
+      - uses: actions/setup-python@v6
+        with:
+          python-version: ${{ matrix.python_version }}
+          architecture: ${{ matrix.arch }}
+      - name: Cache requirements
+        id: cache-venv
+        if: matrix.arch == 'arm64'
+        uses: actions/cache@v4
+        env:
+          SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
+        with:
+          path: |
+            /yt-dlp-build-venv
+          key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
+            ${{ env.BASE_CACHE_KEY }}-
       - name: Install Requirements
-        run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
-          python devscripts/install_deps.py -o --include build
-          python devscripts/install_deps.py --include curl-cffi
-          python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-6.7.0-py3-none-any.whl"
+        env:
+          ARCH: ${{ matrix.arch }}
+          PYI_URL: ${{ env.PYI_REPO }}/releases/download/${{ matrix.pyi_tag }}/${{ env.PYI_WHEEL }}
+          PYI_HASH: ${{ matrix.pyi_hash }}
+        shell: pwsh
+        run: |
+          python -m venv /yt-dlp-build-venv
+          /yt-dlp-build-venv/Scripts/Activate.ps1
+          python -m pip install -U pip
+          # Install custom PyInstaller build and verify hash
+          mkdir /pyi-wheels
+          python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}"
+          python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}"
+          python devscripts/install_deps.py --only-optional-groups --include-group build
+          if ("${Env:ARCH}" -eq "x86") {
+            python devscripts/install_deps.py
+          } else {
+            python devscripts/install_deps.py --include-group curl-cffi
+          }
       - name: Prepare
+        shell: pwsh
         run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
           python devscripts/make_lazy_extractors.py
       - name: Build
+        shell: pwsh
         run: |
+          /yt-dlp-build-venv/Scripts/Activate.ps1
           python -m bundle.pyinstaller
           python -m bundle.pyinstaller --onedir
-          Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_real.exe
-          Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
-      - name: Install Requirements (py2exe)
-        run: |
-          python devscripts/install_deps.py --include py2exe
-      - name: Build (py2exe)
-        run: |
-          python -m bundle.py2exe
-          Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
-          Move-Item ./dist/yt-dlp_real.exe ./dist/yt-dlp.exe
+          Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip
       - name: Verify --update-to
         if: vars.UPDATE_TO_VERIFICATION
+        shell: pwsh
         run: |
-          foreach ($name in @("yt-dlp","yt-dlp_min")) {
-            Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
-            $version = & "./dist/${name}.exe" --version
-            & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
-            $downgraded_version = & "./dist/${name}_downgraded.exe" --version
-            if ($version -eq $downgraded_version) {
-              exit 1
-            }
-          }
+          $name = "yt-dlp${Env:SUFFIX}"
+          Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
+          $version = & "./dist/${name}.exe" --version
+          & "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
+          $downgraded_version = & "./dist/${name}_downgraded.exe" --version
+          if ($version -eq $downgraded_version) {
+            exit 1
+          }
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: build-bin-${{ github.job }}
+          name: build-bin-${{ github.job }}-${{ matrix.arch }}
           path: |
-            dist/yt-dlp.exe
-            dist/yt-dlp_min.exe
-            dist/yt-dlp_win.zip
+            dist/yt-dlp${{ env.SUFFIX }}.exe
+            dist/yt-dlp_win${{ env.SUFFIX }}.zip
           compression-level: 0

-  windows32:
-    needs: process
-    if: inputs.windows32
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.8"
-          architecture: "x86"
-      - name: Install Requirements
-        run: |
-          python devscripts/install_deps.py -o --include build
-          python devscripts/install_deps.py
-          python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-6.7.0-py3-none-any.whl"
-      - name: Prepare
-        run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
-          python devscripts/make_lazy_extractors.py
-      - name: Build
-        run: |
-          python -m bundle.pyinstaller
-      - name: Verify --update-to
-        if: vars.UPDATE_TO_VERIFICATION
-        run: |
-          foreach ($name in @("yt-dlp_x86")) {
-            Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
-            $version = & "./dist/${name}.exe" --version
-            & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
-            $downgraded_version = & "./dist/${name}_downgraded.exe" --version
-            if ($version -eq $downgraded_version) {
-              exit 1
-            }
-          }
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: build-bin-${{ github.job }}
-          path: |
-            dist/yt-dlp_x86.exe
-          compression-level: 0

   meta_files:
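With `--require-hashes`, pip itself refuses the download unless the wheel matches the pinned `pyi_hash`. The check it performs is equivalent to this (x64 values taken from the matrix above; the wheel is assumed to be on disk):

```python
# Equivalent of pip's --require-hashes check for the pinned PyInstaller wheel
# (x64 values from the matrix above; the wheel must already exist locally).
import hashlib

EXPECTED = 'e28cc13e4ad0cc74330d832202806d0c1976e9165da6047309348ca663c0ed3d'

with open('pyinstaller-6.17.0-py3-none-win_amd64.whl', 'rb') as f:
    digest = hashlib.sha256(f.read()).hexdigest()
assert digest == EXPECTED, 'wheel does not match the pinned sha256'
```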
@@ -505,15 +532,13 @@ jobs:
     needs:
       - process
       - unix
-      - linux_static
-      - linux_arm
+      - linux
       - macos
-      - macos_legacy
       - windows
-      - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v5
         with:
           path: artifact
           pattern: build-bin-*
@@ -535,21 +560,45 @@ jobs:
           cat >> _update_spec << EOF
           # This file is used for regulating self-update
           lock 2022.08.18.36 .+ Python 3\.6
-          lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+          lock 2023.11.16 zip Python 3\.7
           lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
+          lock 2024.10.22 py2exe .+
+          lock 2024.10.22 zip Python 3\.8
+          lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+          lock 2025.08.11 darwin_legacy_exe .+
+          lock 2025.08.27 linux_armv7l_exe .+
+          lock 2025.10.14 zip Python 3\.9
           lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
-          lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+          lockV2 yt-dlp/yt-dlp 2023.11.16 zip Python 3\.7
           lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
-          lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
+          lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
+          lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
+          lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+          lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
+          lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
+          lockV2 yt-dlp/yt-dlp 2025.10.14 zip Python 3\.9
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 zip Python 3\.7
           lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
-          lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
+          lockV2 yt-dlp/yt-dlp-nightly-builds 2025.10.14.232845 zip Python 3\.9
+          lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 zip Python 3\.7
           lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
+          lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
+          lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
+          lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+          lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
+          lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
+          lockV2 yt-dlp/yt-dlp-master-builds 2025.10.14.232330 zip Python 3\.9
           EOF

       - name: Sign checksum files
         env:
           GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-        if: env.GPG_SIGNING_KEY != ''
+        if: env.GPG_SIGNING_KEY
         run: |
           gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
           for signfile in ./SHA*SUMS; do
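These lock lines gate self-updating: a build whose variant/system identity matches the pattern cannot update past the listed version. A deliberately simplified model of how such a line applies (the real updater's matching is more involved; this reduces it to a bare `re.match`):

```python
# Simplified model of _update_spec: each entry is (max version, pattern),
# matched against the running build's "<variant> <system>" identity string.
import re

locks = [
    ('2023.11.16', r'zip Python 3\.7'),
    ('2024.10.22', r'win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)'),
]

def max_allowed_version(identity):
    for version, pattern in locks:
        if re.match(pattern, identity):
            return version  # cannot update past this release
    return None  # unrestricted

print(max_allowed_version('zip Python 3.7.9'))   # -> 2023.11.16
print(max_allowed_version('zip Python 3.11.4'))  # -> None
```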
.github/workflows/cache-warmer.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
+name: Keep cache warm
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 22 1,6,11,16,21,27 * *'
+
+jobs:
+  build:
+    if: |
+      vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
+    uses: ./.github/workflows/build.yml
+    with:
+      version: '999999'
+      channel: stable
+      origin: ${{ github.repository }}
+      unix: false
+      linux: false
+      linux_armv7l: true
+      musllinux: false
+      macos: true
+      windows: true
+    permissions:
+      contents: read
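The cron days appear chosen so that no gap between runs, including the month rollover, exceeds GitHub's seven-day eviction window for unused Actions caches. A quick check (assuming a 31-day month):

```python
# Days of month on which the cache-warmer schedule above fires
days = [1, 6, 11, 16, 21, 27]
# Gaps between consecutive runs, plus the rollover gap into the next month
gaps = [b - a for a, b in zip(days, days[1:])] + [days[0] + 31 - days[-1]]
print(max(gaps))  # -> 6, comfortably under the 7-day cache eviction window
```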
.github/workflows/challenge-tests.yml (vendored, new file, 77 lines)
@@ -0,0 +1,77 @@
+name: Challenge Tests
+on:
+  push:
+    paths:
+      - .github/workflows/challenge-tests.yml
+      - test/test_jsc/*.py
+      - yt_dlp/extractor/youtube/jsc/**.js
+      - yt_dlp/extractor/youtube/jsc/**.py
+      - yt_dlp/extractor/youtube/pot/**.py
+      - yt_dlp/utils/_jsruntime.py
+  pull_request:
+    paths:
+      - .github/workflows/challenge-tests.yml
+      - test/test_jsc/*.py
+      - yt_dlp/extractor/youtube/jsc/**.js
+      - yt_dlp/extractor/youtube/jsc/**.py
+      - yt_dlp/extractor/youtube/pot/**.py
+      - yt_dlp/utils/_jsruntime.py
+permissions:
+  contents: read
+
+concurrency:
+  group: challenge-tests-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+
+jobs:
+  tests:
+    name: Challenge Tests
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest]
+        python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11]
+    env:
+      QJS_VERSION: '2025-04-26' # Earliest version with rope strings
+    steps:
+      - uses: actions/checkout@v5
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install Deno
+        uses: denoland/setup-deno@v2
+        with:
+          deno-version: '2.0.0' # minimum supported version
+      - name: Install Bun
+        uses: oven-sh/setup-bun@v2
+        with:
+          # minimum supported version is 1.0.31 but earliest available Windows version is 1.1.0
+          bun-version: ${{ (matrix.os == 'windows-latest' && '1.1.0') || '1.0.31' }}
+      - name: Install Node
+        uses: actions/setup-node@v6
+        with:
+          node-version: '20.0' # minimum supported version
+      - name: Install QuickJS (Linux)
+        if: matrix.os == 'ubuntu-latest'
+        run: |
+          wget "https://bellard.org/quickjs/binary_releases/quickjs-linux-x86_64-${QJS_VERSION}.zip" -O quickjs.zip
+          unzip quickjs.zip qjs
+          sudo install qjs /usr/local/bin/qjs
+      - name: Install QuickJS (Windows)
+        if: matrix.os == 'windows-latest'
+        shell: pwsh
+        run: |
+          Invoke-WebRequest "https://bellard.org/quickjs/binary_releases/quickjs-win-x86_64-${Env:QJS_VERSION}.zip" -OutFile quickjs.zip
+          unzip quickjs.zip
+      - name: Install test requirements
+        run: |
+          python ./devscripts/install_deps.py --print --only-optional-groups --include-group test > requirements.txt
+          python ./devscripts/install_deps.py --print -c certifi -c requests -c urllib3 -c yt-dlp-ejs >> requirements.txt
+          python -m pip install -U -r requirements.txt
+      - name: Run tests
+        timeout-minutes: 15
+        run: |
+          python -m yt_dlp -v --js-runtimes node --js-runtimes bun --js-runtimes quickjs || true
+          python ./devscripts/run_tests.py test/test_jsc -k download
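The `bun-version` line above relies on the usual GitHub Actions `(cond && a) || b` idiom, since the expression syntax has no true ternary operator. The same idiom in isolation (the env name is illustrative):

```yaml
# Hedged sketch: emulate `cond ? a : b` with `(cond && a) || b`.
# Caveat: if `a` itself is falsy ('', 0, false), the `|| b` branch wins regardless of cond.
env:
  BUN_VERSION: ${{ (runner.os == 'Windows' && '1.1.0') || '1.0.31' }}
```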
.github/workflows/codeql.yml (vendored, 8 changes)
@@ -29,11 +29,11 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -47,7 +47,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v2
+        uses: github/codeql-action/autobuild@v3

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -60,6 +60,6 @@ jobs:
       #   ./location_of_script_within_repo/buildscript.sh

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
         with:
           category: "/language:${{matrix.language}}"
.github/workflows/core.yml (vendored, 50 changes)
@@ -6,7 +6,8 @@ on:
       - devscripts/**
       - test/**
       - yt_dlp/**.py
-      - '!yt_dlp/extractor/*.py'
+      - '!yt_dlp/extractor/**.py'
+      - yt_dlp/extractor/youtube/**.py
       - yt_dlp/extractor/__init__.py
       - yt_dlp/extractor/common.py
       - yt_dlp/extractor/extractors.py
@@ -16,7 +17,8 @@ on:
       - devscripts/**
       - test/**
       - yt_dlp/**.py
-      - '!yt_dlp/extractor/*.py'
+      - '!yt_dlp/extractor/**.py'
+      - yt_dlp/extractor/youtube/**.py
       - yt_dlp/extractor/__init__.py
       - yt_dlp/extractor/common.py
       - yt_dlp/extractor/extractors.py
@@ -36,26 +38,54 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        # CPython 3.8 is in quick-test
+        # CPython 3.10 is in quick-test
-        python-version: ['3.9', '3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
+        python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
         include:
           # atleast one of each CPython/PyPy tests must be in windows
           - os: windows-latest
-            python-version: '3.8'
+            python-version: '3.10'
+          - os: windows-latest
+            python-version: '3.11'
           - os: windows-latest
             python-version: '3.12'
           - os: windows-latest
-            python-version: pypy-3.9
+            python-version: '3.13'
+          - os: windows-latest
+            python-version: '3.14'
+          - os: windows-latest
+            python-version: pypy-3.11
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
          python-version: ${{ matrix.python-version }}
       - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py --include test --include curl-cffi
+        run: python ./devscripts/install_deps.py --include-group test --include-group curl-cffi
       - name: Run tests
+        timeout-minutes: 15
         continue-on-error: False
+        env:
+          source: ${{ (github.event_name == 'push' && github.event.before) || 'origin/master' }}
+          target: ${{ (github.event_name == 'push' && github.event.after) || 'HEAD' }}
+        shell: bash
         run: |
+          flags=()
+          # Check if a networking file is involved
+          patterns="\
+          ^yt_dlp/networking/
+          ^yt_dlp/utils/networking\.py$
+          ^test/test_http_proxy\.py$
+          ^test/test_networking\.py$
+          ^test/test_networking_utils\.py$
+          ^test/test_socks\.py$
+          ^test/test_websockets\.py$
+          ^pyproject\.toml$
+          "
+          if git diff --name-only "${source}" "${target}" | grep -Ef <(printf '%s' "${patterns}"); then
+            flags+=(--flaky)
+          fi
           python3 -m yt_dlp -v || true # Print debug head
-          python3 ./devscripts/run_tests.py core
+          python3 -m devscripts.run_tests "${flags[@]}" --pytest-args '--reruns 2 --reruns-delay 3.0' core
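The new test step decides whether to pass `--flaky` by matching the changed file list against a multi-line pattern list fed to `grep -Ef` through process substitution, which is why the step pins `shell: bash`. The same trick in isolation, with an abbreviated pattern list:

```yaml
# Hedged sketch: flag a run when any changed file matches an ERE pattern list.
- name: Detect networking changes
  shell: bash  # process substitution <(...) is a bashism
  run: |
    patterns=$'^yt_dlp/networking/\n^pyproject\\.toml$'
    if git diff --name-only origin/master HEAD | grep -Ef <(printf '%s' "${patterns}"); then
      echo "networking-related files changed; tests will be treated as flaky"
    fi
```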
.github/workflows/download.yml (vendored, 24 changes)
@@ -9,16 +9,16 @@ jobs:
     if: "contains(github.event.head_commit.message, 'ci run dl')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
-          python-version: 3.9
+          python-version: '3.10'
       - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py --include dev
+        run: python ./devscripts/install_deps.py --include-group dev
       - name: Run tests
         continue-on-error: true
-        run: python3 ./devscripts/run_tests.py download
+        run: python ./devscripts/run_tests.py download

   full:
     name: Full Download Tests
@@ -28,21 +28,21 @@ jobs:
       fail-fast: true
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
+        python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
         include:
           # atleast one of each CPython/PyPy tests must be in windows
           - os: windows-latest
-            python-version: '3.8'
+            python-version: '3.10'
           - os: windows-latest
-            python-version: pypy-3.9
+            python-version: pypy-3.11
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py --include dev
+        run: python ./devscripts/install_deps.py --include-group dev
       - name: Run tests
         continue-on-error: true
-        run: python3 ./devscripts/run_tests.py download
+        run: python ./devscripts/run_tests.py download
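Both of the gates seen so far are plain substring checks on the head commit message: `ci run dl` opts in to download tests here, and `ci skip all` opts out of quick-test below. The gating pattern in isolation:

```yaml
# Hedged sketch: run an expensive job only when the pusher opts in via commit message.
jobs:
  expensive-tests:
    if: contains(github.event.head_commit.message, 'ci run dl')
    runs-on: ubuntu-latest
    steps:
      - run: echo "commit message contained the opt-in token"
```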
.github/workflows/issue-lockdown.yml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
+name: Issue Lockdown
+on:
+  issues:
+    types: [opened]
+
+permissions:
+  issues: write
+
+jobs:
+  lockdown:
+    name: Issue Lockdown
+    if: vars.ISSUE_LOCKDOWN
+    runs-on: ubuntu-latest
+    steps:
+      - name: "Lock new issue"
+        env:
+          GH_TOKEN: ${{ github.token }}
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
+          REPOSITORY: ${{ github.repository }}
+        run: |
+          gh issue lock "${ISSUE_NUMBER}" -R "${REPOSITORY}"
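Note the injection-safe pattern in the lockdown step: event data is passed through `env:` and expanded by the shell, instead of being templated with `${{ ... }}` directly into the script where attacker-controlled text could break out. The same pattern with an untrusted string (the field chosen here is illustrative):

```yaml
# Hedged sketch: expand untrusted event data as a shell variable, never inline it.
- env:
    ISSUE_TITLE: ${{ github.event.issue.title }}  # may contain quotes or $(...) metacharacters
  run: |
    printf 'locked issue titled: %s\n' "${ISSUE_TITLE}"
```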
.github/workflows/quick-test.yml (vendored, 25 changes)
@@ -9,31 +9,34 @@ jobs:
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
-      - name: Set up Python 3.8
+      - name: Set up Python 3.10
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
-          python-version: '3.8'
+          python-version: '3.10'
       - name: Install test requirements
-        run: python3 ./devscripts/install_deps.py --include test
+        run: python ./devscripts/install_deps.py --only-optional-groups --include-group test
       - name: Run tests
+        timeout-minutes: 15
         run: |
           python3 -m yt_dlp -v || true
-          python3 ./devscripts/run_tests.py core
+          python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core
   check:
     name: Code check
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
-          python-version: '3.8'
+          python-version: '3.10'
       - name: Install dev dependencies
-        run: python3 ./devscripts/install_deps.py -o --include static-analysis
+        run: python ./devscripts/install_deps.py --only-optional-groups --include-group static-analysis
       - name: Make lazy extractors
-        run: python3 ./devscripts/make_lazy_extractors.py
+        run: python ./devscripts/make_lazy_extractors.py
       - name: Run ruff
         run: ruff check --output-format github .
       - name: Run autopep8
         run: autopep8 --diff .
+      - name: Check file mode
+        run: git ls-files --format="%(objectmode) %(path)" yt_dlp/ | ( ! grep -v "^100644" )
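The new `Check file mode` step is terse: `git ls-files --format` prints each tracked file's mode, `grep -v "^100644"` passes through anything that is not a plain non-executable file, and the `( ! ... )` subshell inverts the exit status, so the step fails exactly when such a file exists. Broken out as a sketch:

```yaml
# Hedged sketch: fail when any tracked file under yt_dlp/ is not mode 100644.
- name: Check file mode
  run: |
    # `! grep -v` inverts the filter: it prints offending files and exits 0 only if there are none
    git ls-files --format="%(objectmode) %(path)" yt_dlp/ | ( ! grep -v "^100644" )
    # a stray executable bit can be cleared with: git update-index --chmod=-x <path>
```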
.github/workflows/release-master.yml (vendored, 28 changes)
@@ -6,10 +6,12 @@ on:
     paths:
       - "yt_dlp/**.py"
       - "!yt_dlp/version.py"
-      - "bundle/*.py"
+      - "bundle/**"
       - "pyproject.toml"
       - "Makefile"
       - ".github/workflows/build.yml"
+      - ".github/workflows/release.yml"
+      - ".github/workflows/release-master.yml"
 concurrency:
   group: release-master
 permissions:
@@ -17,14 +19,30 @@ permissions:

 jobs:
   release:
-    if: vars.BUILD_MASTER != ''
+    if: vars.BUILD_MASTER
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: master
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
+      target: 'master'
     permissions:
       contents: write
-      packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v5
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
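The new `publish_pypi` job uses PyPI trusted publishing: no API token is stored anywhere; the job's OIDC token (granted by `id-token: write`) authenticates the upload, with the matching trust configuration assumed to exist on the PyPI side. Reduced to its skeleton:

```yaml
# Hedged sketch: the minimum a job needs for trusted publishing with this action.
publish:
  runs-on: ubuntu-latest
  permissions:
    id-token: write  # mandatory for trusted publishing
  steps:
    - uses: actions/download-artifact@v5
      with:
        name: build-pypi
        path: dist
    - uses: pypa/gh-action-pypi-publish@release/v1  # uploads dist/* by default
```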
.github/workflows/release-nightly.yml (vendored, 30 changes)
@@ -7,12 +7,12 @@ permissions:

 jobs:
   check_nightly:
-    if: vars.BUILD_NIGHTLY != ''
+    if: vars.BUILD_NIGHTLY
     runs-on: ubuntu-latest
     outputs:
       commit: ${{ steps.check_for_new_commits.outputs.commit }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           fetch-depth: 0
       - name: Check for new commits
@@ -22,9 +22,13 @@ jobs:
             "yt_dlp/*.py"
             ':!yt_dlp/version.py'
             "bundle/*.py"
+            "bundle/docker/compose.yml"
+            "bundle/docker/linux/*"
             "pyproject.toml"
             "Makefile"
             ".github/workflows/build.yml"
+            ".github/workflows/release.yml"
+            ".github/workflows/release-nightly.yml"
           )
           echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

@@ -34,10 +38,26 @@ jobs:
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: nightly
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
+      target: 'nightly'
     permissions:
       contents: write
-      packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v5
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
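The `check_for_new_commits` step above works because the checkout fetches full history (`fetch-depth: 0`) and `git log --since` is limited to the relevant pathspecs; an empty `commit` output then lets downstream jobs skip the release. Standalone sketch with the pathspec list shortened:

```yaml
# Hedged sketch: emit the newest relevant commit from the last 24h as a step output.
- name: Check for new commits
  id: check_for_new_commits
  run: |
    relevant_files=("yt_dlp/*.py" ':!yt_dlp/version.py')
    echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"
```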
.github/workflows/release.yml (vendored, 308 changes)
@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,14 @@ on:
         required: false
         default: ''
         type: string
+      linux_armv7l:
+        required: false
+        default: false
+        type: boolean
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -43,6 +47,10 @@ on:
         required: false
         default: ''
         type: string
+      linux_armv7l:
+        description: Include linux_armv7l
+        default: true
+        type: boolean
       prerelease:
         description: Pre-release
         default: false
@@ -67,145 +75,67 @@ jobs:
       head_sha: ${{ steps.get_target.outputs.head_sha }}

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           fetch-depth: 0

-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
-          python-version: "3.10"
+          python-version: "3.10" # Keep this in sync with test-workflows.yml

       - name: Process inputs
         id: process_inputs
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
         run: |
-          cat << EOF
+          python -m devscripts.setup_variables process_inputs
-          ::group::Inputs
-          prerelease=${{ inputs.prerelease }}
-          source=${{ inputs.source }}
-          target=${{ inputs.target }}
-          version=${{ inputs.version }}
-          ::endgroup::
-          EOF
-          IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
-          IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
-          cat << EOF >> "$GITHUB_OUTPUT"
-          source_repo=${source_repo}
-          source_tag=${source_tag}
-          target_repo=${target_repo}
-          target_tag=${target_tag}
-          EOF

       - name: Setup variables
         id: setup_variables
         env:
-          source_repo: ${{ steps.process_inputs.outputs.source_repo }}
+          INPUTS: ${{ toJSON(inputs) }}
-          source_tag: ${{ steps.process_inputs.outputs.source_tag }}
+          PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
-          target_repo: ${{ steps.process_inputs.outputs.target_repo }}
+          REPOSITORY: ${{ github.repository }}
-          target_tag: ${{ steps.process_inputs.outputs.target_tag }}
+          PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
+          PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
+          SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
+          SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
+          TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
+          TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
+          SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
+          TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
+          HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
+          HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
+          HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
         run: |
-          # unholy bash monstrosity (sincere apologies)
+          python -m devscripts.setup_variables
-          fallback_token () {
-            if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
-              echo "::error::Repository access secret ${target_repo_token^^} not found"
-              exit 1
-            fi
-            target_repo_token=ARCHIVE_REPO_TOKEN
-            return 0
-          }
-
-          source_is_channel=0
-          [[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
-          if [[ -z "${source_repo}" ]]; then
-            source_repo='${{ github.repository }}'
-          elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
-            source_is_channel=1
-            source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
-          elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
-            source_tag="${source_repo}"
-            source_repo='${{ github.repository }}'
-          fi
-          resolved_source="${source_repo}"
-          if [[ "${source_tag}" ]]; then
-            resolved_source="${resolved_source}@${source_tag}"
-          elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
-            resolved_source='stable'
-          fi
-
-          revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
-          version="$(
-            python devscripts/update-version.py \
-            -c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
-            grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
-
-          if [[ "${target_repo}" ]]; then
-            if [[ -z "${target_tag}" ]]; then
-              if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
-                target_tag="${source_tag:-${version}}"
-              else
-                target_tag="${target_repo}"
-                target_repo='${{ github.repository }}'
-              fi
-            fi
-            if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
-              target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
-              target_repo_token='${{ env.target_repo }}_archive_repo_token'
-              ${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
-              pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
-              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
-            fi
-          else
-            target_tag="${source_tag:-${version}}"
-            if ((source_is_channel)); then
-              target_repo="${source_channel}"
-              target_repo_token='${{ env.source_repo }}_archive_repo_token'
-              ${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
-              pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
-              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
-            else
-              target_repo='${{ github.repository }}'
-            fi
-          fi
-
-          if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
-            pypi_project='${{ vars.PYPI_PROJECT }}'
-          fi
-
-          echo "::group::Output variables"
-          cat << EOF | tee -a "$GITHUB_OUTPUT"
-          channel=${resolved_source}
-          version=${version}
-          target_repo=${target_repo}
-          target_repo_token=${target_repo_token}
-          target_tag=${target_tag}
-          pypi_project=${pypi_project}
-          pypi_suffix=${pypi_suffix}
-          EOF
-          echo "::endgroup::"

-      - name: Update documentation
+      - name: Update version & documentation
         env:
-          version: ${{ steps.setup_variables.outputs.version }}
+          CHANNEL: ${{ steps.setup_variables.outputs.channel }}
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          # Use base repo since this could be committed; build jobs will call this again with true origin
-        if: |
+          REPOSITORY: ${{ github.repository }}
-          !inputs.prerelease && env.target_repo == github.repository
+          VERSION: ${{ steps.setup_variables.outputs.version }}
         run: |
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           make doc

       - name: Push to release
         id: push_release
         env:
-          version: ${{ steps.setup_variables.outputs.version }}
+          VERSION: ${{ steps.setup_variables.outputs.version }}
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
+          GITHUB_EVENT_REF: ${{ github.event.ref }}
         if: |
-          !inputs.prerelease && env.target_repo == github.repository
+          !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
         run: |
           git config --global user.name "github-actions[bot]"
           git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
           git add -u
-          git commit -m "Release ${{ env.version }}" \
+          git commit -m "Release ${VERSION}" \
-            -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
+            -m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
-          git push origin --force ${{ github.event.ref }}:release
+          git push origin --force "${GITHUB_EVENT_REF}:release"

       - name: Get target commitish
         id: get_target
@@ -214,10 +144,10 @@ jobs:

       - name: Update master
         env:
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          GITHUB_EVENT_REF: ${{ github.event.ref }}
         if: |
-          vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
+          vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
-        run: git push origin ${{ github.event.ref }}
+        run: git push origin "${GITHUB_EVENT_REF}"

   build:
     needs: prepare
@@ -226,10 +156,9 @@ jobs:
       version: ${{ needs.prepare.outputs.version }}
       channel: ${{ needs.prepare.outputs.channel }}
       origin: ${{ needs.prepare.outputs.target_repo }}
+      linux_armv7l: ${{ inputs.linux_armv7l }}
     permissions:
       contents: read
-      packages: write # For package cache
-      actions: write # For cleaning up cache
     secrets:
       GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

@@ -241,30 +170,30 @@ jobs:
       id-token: write # mandatory for trusted publishing

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: "3.10"

       - name: Install Requirements
         run: |
           sudo apt -y install pandoc man
-          python devscripts/install_deps.py -o --include build
+          python devscripts/install_deps.py --only-optional-groups --include-group build

       - name: Prepare
         env:
-          version: ${{ needs.prepare.outputs.version }}
+          VERSION: ${{ needs.prepare.outputs.version }}
-          suffix: ${{ needs.prepare.outputs.pypi_suffix }}
+          SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
-          channel: ${{ needs.prepare.outputs.channel }}
+          CHANNEL: ${{ needs.prepare.outputs.channel }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
+          TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
-          pypi_project: ${{ needs.prepare.outputs.pypi_project }}
+          PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
         run: |
-          python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           python devscripts/make_lazy_extractors.py
-          sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
+          sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml

       - name: Build
         run: |
@@ -278,7 +207,17 @@ jobs:
           make clean-cache
           python -m build --no-isolation .

+      - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-pypi
+          path: |
+            dist/*
+          compression-level: 0

       - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
           verbose: true
@@ -288,97 +227,100 @@ jobs:
     permissions:
       contents: write
     runs-on: ubuntu-latest
+    env:
+      TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
+      TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
+      VERSION: ${{ needs.prepare.outputs.version }}
+      HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
         with:
           fetch-depth: 0
-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
         with:
           path: artifact
           pattern: build-*
           merge-multiple: true
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: "3.10"

       - name: Generate release notes
         env:
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
+          REPOSITORY: ${{ github.repository }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
+          BASE_REPO: yt-dlp/yt-dlp
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
+          NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
+          MASTER_REPO: yt-dlp/yt-dlp-master-builds
+          DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
         run: |
           printf '%s' \
-            '[]' \
+            "[]" \
-            '(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
+            "(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
-            '[]' \
+            "[]" \
-            '(https://discord.gg/H5MNcFW63r "Discord") ' \
+            "(https://discord.gg/H5MNcFW63r \"Discord\") " \
-            '[]' \
+            "[]" \
-            '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
+            "(https://github.com/${BASE_REPO}/blob/master/Maintainers.md#maintainers \"Donate\") " \
-            '[]' \
+            "[]" \
-            '(https://github.com/${{ github.repository }}' \
+            "(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
-            '${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
+          if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
-            ${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
+            printf '%s' \
             "[]" \
-            "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
+              "(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
             "[]" \
-            "(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
+              "(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
-          printf '\n\n' >> ./RELEASE_NOTES
+            printf '\n\n' >> ./RELEASE_NOTES
+          fi
-          cat >> ./RELEASE_NOTES << EOF
+          printf '\n\n%s\n\n%s%s%s\n\n---\n' \
-          #### A description of the various files are in the [README](https://github.com/${{ github.repository }}#release-files)
+            "#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
-          ---
+            "The zipimport Unix executable contains code licensed under ISC and MIT. " \
-          $(python ./devscripts/make_changelog.py -vv --collapsible)
+            "The PyInstaller-bundled executables are subject to these and other licenses, all of which are compiled in " \
-          EOF
+            "[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
+          python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
           printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
           cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
-          printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
+          printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
           cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES

       - name: Publish to archive repo
         env:
           GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
           GH_REPO: ${{ needs.prepare.outputs.target_repo }}
-          version: ${{ needs.prepare.outputs.version }}
+          TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
-          channel: ${{ needs.prepare.outputs.channel }}
+          TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
         if: |
-          inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
+          inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
         run: |
-          title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
           gh release create \
             --notes-file ARCHIVE_NOTES \
-            --title "${title} ${{ env.version }}" \
+            --title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
-            ${{ env.version }} \
+            "${VERSION}" \
             artifact/*

       - name: Prune old release
         env:
           GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
         if: |
-          env.target_repo == github.repository && env.target_tag != env.version
+          env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
         run: |
-          gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
+          gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
-          git tag --delete "${{ env.target_tag }}" || true
+          git tag --delete "${TARGET_TAG}" || true
           sleep 5 # Enough time to cover deletion race condition

       - name: Publish release
         env:
           GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
+          NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
+          TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
+          TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
+          PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
         if: |
-          env.target_repo == github.repository
+          env.TARGET_REPO == github.repository
         run: |
-          title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
+          gh_options=(
-          title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
+            --notes-file "${NOTES_FILE}"
-          gh release create \
+            --target "${HEAD_SHA}"
-            --notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
+            --title "${TITLE_PREFIX}${TITLE}${VERSION}"
-            --target ${{ env.head_sha }} \
+          )
-            --title "${title}${{ env.version }}" \
+          if ((PRERELEASE)); then
-            ${{ inputs.prerelease && '--prerelease' || '' }} \
+            gh_options+=(--prerelease)
-            ${{ env.target_tag }} \
+          fi
-            artifact/*
+          gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*
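Much of the rewritten `Setup variables` step leans on a single idiom: computing a repository-variable name at runtime with `vars[format(...)]`, so each channel (`nightly`, `master`, and so on) can carry its own configuration. In isolation, with variable names following the convention used above:

```yaml
# Hedged sketch: dynamic lookup of per-channel repo variables and secrets.
env:
  # if source_repo resolves to 'nightly', this reads the repo variable NIGHTLY_PYPI_PROJECT
  SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
  # secret *values* should not leak into scripts; pass only a boolean saying one exists
  HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
```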
.github/workflows/sanitize-comment.yml (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
+name: Sanitize comment
+
+on:
+  issue_comment:
+    types: [created, edited]
+
+permissions:
+  issues: write
+
+jobs:
+  sanitize-comment:
+    name: Sanitize comment
+    if: vars.SANITIZE_COMMENT && !github.event.issue.pull_request
+    runs-on: ubuntu-latest
+    steps:
+      - name: Sanitize comment
+        uses: yt-dlp/sanitize-comment@v1
.github/workflows/test-workflows.yml (vendored, new file, 52 lines)
@@ -0,0 +1,52 @@
+name: Test and lint workflows
+on:
+  push:
+    paths:
+      - .github/workflows/*
+      - bundle/docker/linux/*.sh
+      - devscripts/setup_variables.py
+      - devscripts/setup_variables_tests.py
+      - devscripts/utils.py
+  pull_request:
+    paths:
+      - .github/workflows/*
+      - bundle/docker/linux/*.sh
+      - devscripts/setup_variables.py
+      - devscripts/setup_variables_tests.py
+      - devscripts/utils.py
+permissions:
+  contents: read
+env:
+  ACTIONLINT_VERSION: "1.7.8"
+  ACTIONLINT_SHA256SUM: be92c2652ab7b6d08425428797ceabeb16e31a781c07bc388456b4e592f3e36a
+  ACTIONLINT_REPO: https://github.com/rhysd/actionlint
+
+jobs:
+  check:
+    name: Check workflows
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v5
+      - uses: actions/setup-python@v6
+        with:
+          python-version: "3.10" # Keep this in sync with release.yml's prepare job
+      - name: Install requirements
+        env:
+          ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
+        run: |
+          python -m devscripts.install_deps --only-optional-groups --include-group test
+          sudo apt -y install shellcheck
+          python -m pip install -U pyflakes
+          curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
+          printf '%s %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
+          tar xvzf "${ACTIONLINT_TARBALL}" actionlint
+          chmod +x actionlint
+      - name: Run actionlint
+        run: |
+          ./actionlint -color
+      - name: Check Docker shell scripts
+        run: |
+          shellcheck bundle/docker/linux/*.sh
+      - name: Test GHA devscripts
+        run: |
+          pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py
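The actionlint install above is a checksum-pinned download: the version and its expected SHA-256 live in workflow `env`, and `sha256sum -c -` aborts the step if the tarball does not match. The same pattern in isolation, with the values copied from this workflow (note that GNU sha256sum's checklist format expects two spaces between hash and filename):

```yaml
# Hedged sketch: refuse to unpack a release tarball unless it matches its pinned SHA-256.
- run: |
    curl -LO "https://github.com/rhysd/actionlint/releases/download/v1.7.8/actionlint_1.7.8_linux_amd64.tar.gz"
    printf '%s  %s' "be92c2652ab7b6d08425428797ceabeb16e31a781c07bc388456b4e592f3e36a" "actionlint_1.7.8_linux_amd64.tar.gz" | sha256sum -c -
    tar xvzf actionlint_1.7.8_linux_amd64.tar.gz actionlint
```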
.gitignore (vendored, 7 changes)
@@ -92,6 +92,7 @@ updates_key.pem
 *.class
 *.isorted
 *.stackdump
+uv.lock

 # Generated
 AUTHORS
@@ -104,6 +105,9 @@ README.txt
 *.zsh
 *.spec
 test/testdata/sigs/player-*.js
+test/testdata/thumbnails/empty.webp
+test/testdata/thumbnails/foo\ %d\ bar/foo_%d.*
+.ejs-*

 # Binary
 /youtube-dl
@@ -126,3 +130,6 @@ yt-dlp.zip
 # Plugins
 ytdlp_plugins/
 yt-dlp-plugins
+
+# Packages
+yt_dlp_ejs/
@@ -12,6 +12,7 @@ # CONTRIBUTING TO YT-DLP
|
|||||||
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
|
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
|
||||||
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
|
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
|
||||||
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
|
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
|
||||||
|
- [AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY](#automated-contributions-ai--llm-policy)
|
||||||
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
|
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
|
||||||
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
|
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
|
||||||
- [Adding support for a new site](#adding-support-for-a-new-site)
|
- [Adding support for a new site](#adding-support-for-a-new-site)
|
||||||
@@ -37,14 +38,18 @@ # OPENING AN ISSUE
|
|||||||
**Please include the full output of yt-dlp when run with `-vU`**, i.e. **add** `-vU` flag to **your command line**, copy the **whole** output and post it in the issue body wrapped in \`\`\` for better formatting. It should look similar to this:
|
**Please include the full output of yt-dlp when run with `-vU`**, i.e. **add** `-vU` flag to **your command line**, copy the **whole** output and post it in the issue body wrapped in \`\`\` for better formatting. It should look similar to this:
|
||||||
```
|
```
|
||||||
$ yt-dlp -vU <your command line>
|
$ yt-dlp -vU <your command line>
|
||||||
[debug] Command-line config: ['-v', 'demo.com']
|
[debug] Command-line config: ['-vU', 'https://www.example.com/']
|
||||||
[debug] Encodings: locale UTF-8, fs utf-8, out utf-8, pref UTF-8
|
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||||
[debug] yt-dlp version 2021.09.25 (zip)
|
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||||
[debug] Python version 3.8.10 (CPython 64bit) - Linux-5.4.0-74-generic-x86_64-with-glibc2.29
|
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||||
[debug] exe versions: ffmpeg 4.2.4, ffprobe 4.2.4
|
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||||
|
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||||
[debug] Proxy map: {}
|
[debug] Proxy map: {}
|
||||||
Current Build Hash 25cc412d1d3c0725a1f2f5b7e4682f6fb40e6d15f7024e96f7afd572e9919535
|
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||||
yt-dlp is up to date (2021.09.25)
|
[debug] Loaded 1838 extractors
|
||||||
|
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||||
|
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||||
|
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
**Do not post screenshots of verbose logs; only plain text is acceptable.**
|
**Do not post screenshots of verbose logs; only plain text is acceptable.**
|
||||||
@@ -122,7 +127,7 @@ ### Are you willing to share account details if needed?
|
|||||||
While these steps won't necessarily ensure that no misuse of the account takes place, these are still some good practices to follow.
|
While these steps won't necessarily ensure that no misuse of the account takes place, these are still some good practices to follow.
|
||||||
|
|
||||||
- Look for people with `Member` (maintainers of the project) or `Contributor` (people who have previously contributed code) tag on their messages.
|
- Look for people with `Member` (maintainers of the project) or `Contributor` (people who have previously contributed code) tag on their messages.
|
||||||
- Change the password before sharing the account to something random (use [this](https://passwordsgenerator.net/) if you don't have a random password generator).
|
- Change the password before sharing the account to something random.
|
||||||
- Change the password after receiving the account back.
|
- Change the password after receiving the account back.
|
||||||
|
|
||||||
### Is the website primarily used for piracy?
|
### Is the website primarily used for piracy?
|
||||||
@@ -130,6 +135,17 @@ ### Is the website primarily used for piracy?
|
|||||||
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
||||||
|
|
||||||
|
|
||||||
|
# AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY
|
||||||
|
|
||||||
|
Please refrain from submitting issues or pull requests that have been generated by an LLM or other fully-automated tools. Any submission that is in violation of this policy will be closed, and the submitter may be blocked from this repository without warning.
|
||||||
|
|
||||||
|
If you submit an issue, you need to understand what your issue description is saying. You need to be able to answer questions about your bug report or feature request. Using an AI tool to *proofread* your issue/comment text is acceptable. Using an AI tool to *write* your issue/comment text is unacceptable.
|
||||||
|
|
||||||
|
If you submit a pull request, you need to understand what every line of code you've changed does. If you can't explain why your PR is doing something, then do not submit it. Using an AI tool to generate entire lines of code is unacceptable.
|
||||||
|
|
||||||
|
The rationale behind this policy is that automated contributions are a waste of the maintainers' time. Humans spend their time and brainpower reviewing every submission. Issues or pull requests generated by automation tools create an imbalance of effort between the submitter and the reviewer. Nobody learns anything when a maintainer reviews code written by an LLM.
|
||||||
|
|
||||||
|
Additionally, AI-generated code conflicts with this project's license (Unlicense), since you cannot truly release code into the public domain if you didn't author it yourself.
|
||||||
|
|
||||||
|
|
||||||
# DEVELOPER INSTRUCTIONS
|
# DEVELOPER INSTRUCTIONS
|
||||||
@@ -161,7 +177,7 @@ # DEVELOPER INSTRUCTIONS
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
# To only install development dependencies:
|
# To only install development dependencies:
|
||||||
$ python -m devscripts.install_deps --include dev
|
$ python -m devscripts.install_deps --include-group dev
|
||||||
|
|
||||||
# Or, for an editable install plus dev dependencies:
|
# Or, for an editable install plus dev dependencies:
|
||||||
$ python -m pip install -e ".[default,dev]"
|
$ python -m pip install -e ".[default,dev]"
|
||||||
@@ -233,7 +249,7 @@ ## Adding support for a new site
|
|||||||
# * MD5 checksum; start the string with 'md5:', e.g.
|
# * MD5 checksum; start the string with 'md5:', e.g.
|
||||||
# 'description': 'md5:098f6bcd4621d373cade4e832627b4f6',
|
# 'description': 'md5:098f6bcd4621d373cade4e832627b4f6',
|
||||||
# * A regular expression; start the string with 're:', e.g.
|
# * A regular expression; start the string with 're:', e.g.
|
||||||
# 'thumbnail': r're:^https?://.*\.jpg$',
|
# 'thumbnail': r're:https?://.*\.jpg$',
|
||||||
# * A count of elements in a list; start the string with 'count:', e.g.
|
# * A count of elements in a list; start the string with 'count:', e.g.
|
||||||
# 'tags': 'count:10',
|
# 'tags': 'count:10',
|
||||||
# * Any Python type, e.g.
|
# * Any Python type, e.g.
|
||||||
@@ -268,7 +284,7 @@ ## Adding support for a new site
|
|||||||
|
|
||||||
You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
|
You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
|
||||||
|
|
||||||
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.8 and above. Backward compatibility is not required for even older versions of Python.
|
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.10 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
|
||||||
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
@@ -302,10 +318,9 @@ ### Mandatory and optional metafields

 For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L119-L440) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:

 - `id` (media identifier)
-- `title` (media title)
 - `url` (media download URL) or `formats`

-The aforementioned metafields are the critical data that the extraction does not make any sense without and if any of them fail to be extracted then the extractor is considered completely broken. While all extractors must return a `title`, they must also allow it's extraction to be non-fatal.
+The aforementioned metadata fields are the critical data without which extraction does not make any sense. If any of them fail to be extracted, then the extractor is considered broken. All other metadata extraction should be completely non-fatal.

 For pornographic sites, appropriate `age_limit` must also be returned.

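For reference, a minimal sketch of a `_real_extract` return value that satisfies the mandatory fields listed in this hunk (all values are illustrative):

```python
# Illustrative only: the smallest info dict an extractor could return
def _real_extract(self, url):
    return {
        'id': '42',                              # media identifier (mandatory)
        'url': 'https://example.com/video.mp4',  # or provide a 'formats' list instead
        'title': 'Example video',                # like all other metadata, its extraction must be non-fatal
    }
```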
@@ -765,12 +780,10 @@ #### Examples
 ```

-# My pull request is labeled pending-fixes
+## My pull request is labeled pending-fixes

 The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.

 # EMBEDDING YT-DLP
 See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program
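As a quick taste of what that README section covers, a minimal embedding sketch (the options and URL are illustrative):

```python
import yt_dlp

# Illustrative options; see README.md#embedding-yt-dlp for the full API
ydl_opts = {'format': 'bestaudio/best'}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])
```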
CONTRIBUTORS (193 changed lines)
@@ -1,9 +1,10 @@
 pukkandan (owner)
-shirt-dev (collaborator)
-coletdjnz/colethedj (collaborator)
-Ashish0804 (collaborator)
-bashonly (collaborator)
-Grub4K (collaborator)
+shirt-dev (maintainer)
+coletdjnz (maintainer)
+Ashish0804 (maintainer)
+bashonly (maintainer)
+Grub4K (maintainer)
+seproDev (maintainer)
 h-h-h-h
 pauldubois98
 nixxo
@@ -403,7 +404,6 @@ rebane2001
 road-master
 rohieb
 sdht0
-seproDev
 Hill-98
 LXYan2333
 mushbite
@@ -653,3 +653,184 @@ LeSuisse
 DunnesH
 iancmy
 mokrueger
+luvyana
+szantnerb
+hugepower
+scribblemaniac
+Codenade
+Demon000
+Deukhoofd
+grqz
+hibes
+Khaoklong51
+kieraneglin
+lengzuo
+naglis
+ndyanx
+otovalek
+quad
+rakslice
+sahilsinghss73
+tony-hn
+xingchensong
+BallzCrasher
+coreywright
+eric321
+poyhen
+tetra-fox
+444995
+63427083
+allendema
+DarkZeros
+DTrombett
+imranh2
+KarboniteKream
+mikkovedru
+pktiuk
+rubyevadestaxes
+avagordon01
+CounterPillow
+JoseAngelB
+KBelmin
+kesor
+MellowKyler
+Wesley107772
+a13ssandr0
+ChocoLZS
+doe1080
+hugovdev
+jshumphrey
+julionc
+manavchaudhary1
+powergold1
+Sakura286
+SamDecrock
+stratus-ss
+subrat-lima
+gitninja1234
+jkruse
+xiaomac
+wesson09
+Crypto90
+MutantPiggieGolem1
+Sanceilaks
+Strkmn
+0x9fff00
+4ft35t
+7x11x13
+b5i
+cotko
+d3d9
+Dioarya
+finch71
+hexahigh
+InvalidUsernameException
+jixunmoe
+knackku
+krandor
+kvk-2015
+lonble
+msm595
+n10dollar
+NecroRomnt
+pjrobertson
+subsense
+test20140
+arantius
+entourage8
+lfavole
+mp3butcher
+slipinthedove
+YoshiTabletopGamer
+Arc8ne
+benfaerber
+chrisellsworth
+fries1234
+Kenshin9977
+MichaelDeBoey
+msikma
+pedro
+pferreir
+red-acid
+refack
+rysson
+somini
+thedenv
+vallovic
+arabcoders
+mireq
+mlabeeb03
+1271
+CasperMcFadden95
+Kicer86
+Kiritomo
+leeblackc
+meGAmeS1
+NeonMan
+pj47x
+troex
+WouterGordts
+baierjan
+GeoffreyFrogeye
+Pawka
+v3DJG6GL
+yozel
+brian6932
+iednod55
+maxbin123
+nullpos
+anlar
+eason1478
+ceandreasen
+chauhantirth
+helpimnotdrowning
+adamralph
+averageFOSSenjoyer
+bubo
+flanter21
+Georift
+moonshinerd
+R0hanW
+ShockedPlot7560
+swayll
+atsushi2965
+barryvan
+injust
+iribeirocampos
+rolandcrosby
+Sojiroh
+tchebb
+AzartX47
+e2dk4r
+junyilou
+PierreMesure
+Randalix
+runarmod
+gitchasing
+zakaryan2004
+cdce8p
+nicolaasjan
+willsmillie
+CasualYT31
+cecilia-sanare
+dhwz
+robin-mu
+shssoichiro
+thanhtaivtt
+uoag
+CaramelConnoisseur
+ctengel
+einstein95
+evilpie
+i3p9
+JrM2628
+krystophny
+matyb08
+pha1n0q
+PierceLBrooks
+sepro
+TheQWERTYCodr
+thomasmllt
+w4grfw
+WeidiDeng
+Zer0spectrum
Changelog.md (1332 changed lines): file diff suppressed because it is too large
Collaborators.md → Maintainers.md
@@ -1,59 +1,36 @@
-# Collaborators
+# Maintainers

-This is a list of the collaborators of the project and their major contributions. See the [Changelog](Changelog.md) for more details.
+This file lists the maintainers of yt-dlp and their major contributions. See the [Changelog](Changelog.md) for more details.

 You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [authors of youtube-dl](https://github.com/ytdl-org/youtube-dl/blob/master/AUTHORS)

+## Core Maintainers

-## [pukkandan](https://github.com/pukkandan)
+Core Maintainers are responsible for reviewing and merging contributions, publishing releases, and steering the overall direction of the project.

-[](https://ko-fi.com/pukkandan)
-[](https://github.com/sponsors/pukkandan)
+**You can contact the core maintainers via `maintainers@yt-dlp.org`.**

-* Owner of the fork
+This is **NOT** a support channel. [Open an issue](https://github.com/yt-dlp/yt-dlp/issues/new/choose) if you need help or want to report a bug.

-## [shirt](https://github.com/shirt-dev)
-
-[](https://ko-fi.com/shirt)
-
-* Multithreading (`-N`) and aria2c support for fragment downloads
-* Support for media initialization and discontinuity in HLS
-* The self-updater (`-U`)
-
-## [coletdjnz](https://github.com/coletdjnz)
+### [coletdjnz](https://github.com/coletdjnz)

 [](https://github.com/sponsors/coletdjnz)

-* Improved plugin architecture
-* Rewrote the networking infrastructure, implemented support for `requests`
-* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
-* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
-* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc
+* Overhauled the networking stack and implemented support for `requests` and `curl_cffi` (`--impersonate`) HTTP clients
+* Reworked the plugin architecture to support installing plugins across all yt-dlp distributions (exe, pip, etc.)
+* Maintains support for YouTube
+* Added and fixed support for various other sites

-## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>
-
-[](https://ko-fi.com/ashish0804)
-
-* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
-* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
-* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc
+### [bashonly](https://github.com/bashonly)

-## [bashonly](https://github.com/bashonly)
-
-* `--update-to`, self-updater rewrite, automated/nightly/master releases
-* `--cookies-from-browser` support for Firefox containers, external downloader cookie handling overhaul
-* Added support for new websites like Dacast, Kick, NBCStations, Triller, VideoKen, Weverse, WrestleUniverse etc
-* Improved/fixed support for Anvato, Brightcove, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc
+* Rewrote and maintains the build/release workflows and the self-updater: executables, automated/nightly/master releases, `--update-to`
+* Overhauled external downloader cookie handling
+* Added `--cookies-from-browser` support for Firefox containers
+* Overhauled and maintains support for sites like Youtube, Vimeo, Twitter, TikTok, etc
+* Added support for sites like Dacast, Kick, Loom, SproutVideo, Triller, Weverse, etc

-## [Grub4K](https://github.com/Grub4K)
+### [Grub4K](https://github.com/Grub4K)

 [](https://github.com/sponsors/Grub4K) [](https://ko-fi.com/Grub4K)

@@ -63,8 +40,48 @@ ## [Grub4K](https://github.com/Grub4K)
 * Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc

-## [sepro](https://github.com/seproDev)
+### [sepro](https://github.com/seproDev)

 * UX improvements: Warn when ffmpeg is missing, warn when double-clicking exe
 * Code cleanup: Remove dead extractors, mark extractors as broken, enable/apply ruff rules
 * Improved/fixed/added ArdMediathek, DRTV, Floatplane, MagentaMusik, Naver, Nebula, OnDemandKorea, Vbox7 etc

+## Inactive Core Maintainers
+
+### [pukkandan](https://github.com/pukkandan)
+
+[](https://ko-fi.com/pukkandan)
+[](https://github.com/sponsors/pukkandan)
+
+* Founder of the fork
+* Lead Maintainer from 2021-2024
+
+### [shirt](https://github.com/shirt-dev)
+
+[](https://ko-fi.com/shirt)
+
+* Multithreading (`-N`) and aria2c support for fragment downloads
+* Support for media initialization and discontinuity in HLS
+* The self-updater (`-U`)
+
+### [Ashish0804](https://github.com/Ashish0804)
+
+[](https://ko-fi.com/ashish0804)
+
+* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
+* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
+* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc
+
+## Triage Maintainers
+
+Triage Maintainers are frequent contributors who can manage issues and pull requests.
+
+- [gamer191](https://github.com/gamer191)
+- [garret1317](https://github.com/garret1317)
+- [pzhlkj6612](https://github.com/pzhlkj6612)
+- [DTrombett](https://github.com/dtrombett)
+- [doe1080](https://github.com/doe1080)
+- [grqz](https://github.com/grqz)
Makefile (114 changed lines)
@@ -1,4 +1,5 @@
 all: lazy-extractors yt-dlp doc pypi-files
+all-extra: lazy-extractors yt-dlp-extra doc pypi-files
 clean: clean-test clean-dist
 clean-all: clean clean-cache
 completions: completion-bash completion-fish completion-zsh
@@ -10,21 +11,27 @@ tar: yt-dlp.tar.gz
 # intended use: when building a source distribution,
 # make pypi-files && python3 -m build -sn .
 pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
-	completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
+	completions yt-dlp.1 pyproject.toml devscripts/* test/*

 .PHONY: all clean clean-all clean-test clean-dist clean-cache \
 	completions completion-bash completion-fish completion-zsh \
 	doc issuetemplates supportedsites ot offlinetest codetest test \
-	tar pypi-files lazy-extractors install uninstall
+	tar pypi-files lazy-extractors install uninstall \
+	all-extra yt-dlp-extra current-ejs-version

+.IGNORE: current-ejs-version
+.SILENT: current-ejs-version

 clean-test:
-	rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
+	rm -rf tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
 	*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
 	*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.lrc *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 *.mp4 \
-	*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
+	*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp \
+	test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp "test/testdata/thumbnails/foo %d bar/foo_%d."*
 clean-dist:
 	rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
-	yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
+	yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS \
+	yt-dlp.zip .ejs-* yt_dlp_ejs/
 clean-cache:
 	find . \( \
 		-type d -name ".*_cache" -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \
@@ -80,28 +87,49 @@ test:
 offlinetest: codetest
 	$(PYTHON) -m pytest -Werror -m "not download"

-CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's,/__init__.py,,' | grep -v '/__' | sort
-CODE_FOLDERS != $(CODE_FOLDERS_CMD)
-CODE_FOLDERS ?= $(shell $(CODE_FOLDERS_CMD))
-CODE_FILES_CMD = for f in $(CODE_FOLDERS) ; do echo "$$f" | sed 's,$$,/*.py,' ; done
-CODE_FILES != $(CODE_FILES_CMD)
-CODE_FILES ?= $(shell $(CODE_FILES_CMD))
-yt-dlp: $(CODE_FILES)
+PY_CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's|/__init__\.py||' | grep -v '/__' | sort
+PY_CODE_FOLDERS != $(PY_CODE_FOLDERS_CMD)
+PY_CODE_FOLDERS ?= $(shell $(PY_CODE_FOLDERS_CMD))
+
+PY_CODE_FILES_CMD = for f in $(PY_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.py|' ; done
+PY_CODE_FILES != $(PY_CODE_FILES_CMD)
+PY_CODE_FILES ?= $(shell $(PY_CODE_FILES_CMD))
+
+JS_CODE_FOLDERS_CMD = find yt_dlp -type f -name '*.js' | sed 's|/[^/]\{1,\}\.js$$||' | uniq
+JS_CODE_FOLDERS != $(JS_CODE_FOLDERS_CMD)
+JS_CODE_FOLDERS ?= $(shell $(JS_CODE_FOLDERS_CMD))
+
+JS_CODE_FILES_CMD = for f in $(JS_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.js|' ; done
+JS_CODE_FILES != $(JS_CODE_FILES_CMD)
+JS_CODE_FILES ?= $(shell $(JS_CODE_FILES_CMD))
+
+yt-dlp.zip: $(PY_CODE_FILES) $(JS_CODE_FILES)
 	mkdir -p zip
-	for d in $(CODE_FOLDERS) ; do \
+	for d in $(PY_CODE_FOLDERS) ; do \
 		mkdir -p zip/$$d ;\
 		cp -pPR $$d/*.py zip/$$d/ ;\
 	done
-	(cd zip && touch -t 200001010101 $(CODE_FILES))
-	mv zip/yt_dlp/__main__.py zip/
-	(cd zip && zip -q ../yt-dlp $(CODE_FILES) __main__.py)
+	for d in $(JS_CODE_FOLDERS) ; do \
+		mkdir -p zip/$$d ;\
+		cp -pPR $$d/*.js zip/$$d/ ;\
+	done
+	(cd zip && touch -t 200001010101 $(PY_CODE_FILES) $(JS_CODE_FILES))
+	rm -f zip/yt_dlp/__main__.py
+	(cd zip && zip -q ../yt-dlp.zip $(PY_CODE_FILES) $(JS_CODE_FILES))
 	rm -rf zip

+yt-dlp: yt-dlp.zip
+	mkdir -p zip
+	cp -pP yt_dlp/__main__.py zip/
+	touch -t 200001010101 zip/__main__.py
+	(cd zip && zip -q ../yt-dlp.zip __main__.py)
 	echo '#!$(PYTHON)' > yt-dlp
 	cat yt-dlp.zip >> yt-dlp
 	rm yt-dlp.zip
 	chmod a+x yt-dlp
+	rm -rf zip

-README.md: $(CODE_FILES) devscripts/make_readme.py
+README.md: $(PY_CODE_FILES) devscripts/make_readme.py
 	COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py

 CONTRIBUTING.md: README.md devscripts/make_contributing.py
@@ -126,15 +154,15 @@ yt-dlp.1: README.md devscripts/prepare_manpage.py
 	pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
 	rm -f yt-dlp.1.temp.md

-completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in
+completions/bash/yt-dlp: $(PY_CODE_FILES) devscripts/bash-completion.in
 	mkdir -p completions/bash
 	$(PYTHON) devscripts/bash-completion.py

-completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in
+completions/zsh/_yt-dlp: $(PY_CODE_FILES) devscripts/zsh-completion.in
 	mkdir -p completions/zsh
 	$(PYTHON) devscripts/zsh-completion.py

-completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in
+completions/fish/yt-dlp.fish: $(PY_CODE_FILES) devscripts/fish-completion.in
 	mkdir -p completions/fish
 	$(PYTHON) devscripts/fish-completion.py

@@ -156,9 +184,9 @@ yt-dlp.tar.gz: all
 	--exclude '.git' \
 	-- \
 	README.md supportedsites.md Changelog.md LICENSE \
-	CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
+	CONTRIBUTING.md Maintainers.md CONTRIBUTORS AUTHORS \
 	Makefile yt-dlp.1 README.txt completions .gitignore \
-	setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
+	yt-dlp yt_dlp pyproject.toml devscripts test

 AUTHORS: Changelog.md
 	@if [ -d '.git' ] && command -v git > /dev/null ; then \
@@ -171,3 +199,45 @@ CONTRIBUTORS: Changelog.md
 	echo 'Updating $@ from git commit history' ; \
 	$(PYTHON) devscripts/make_changelog.py -v -c > /dev/null ; \
 	fi
+
+# The following EJS_-prefixed variables are auto-generated by devscripts/update_ejs.py
+# DO NOT EDIT!
+EJS_VERSION = 0.3.1
+EJS_WHEEL_NAME = yt_dlp_ejs-0.3.1-py3-none-any.whl
+EJS_WHEEL_HASH = sha256:a6e3548874db7c774388931752bb46c7f4642c044b2a189e56968f3d5ecab622
+EJS_PY_FOLDERS = yt_dlp_ejs yt_dlp_ejs/yt yt_dlp_ejs/yt/solver
+EJS_PY_FILES = yt_dlp_ejs/__init__.py yt_dlp_ejs/_version.py yt_dlp_ejs/yt/__init__.py yt_dlp_ejs/yt/solver/__init__.py
+EJS_JS_FOLDERS = yt_dlp_ejs/yt/solver
+EJS_JS_FILES = yt_dlp_ejs/yt/solver/core.min.js yt_dlp_ejs/yt/solver/lib.min.js
+
+yt-dlp-extra: current-ejs-version .ejs-$(EJS_VERSION) $(EJS_PY_FILES) $(EJS_JS_FILES) yt-dlp.zip
+	mkdir -p zip
+	for d in $(EJS_PY_FOLDERS) ; do \
+		mkdir -p zip/$$d ;\
+		cp -pPR $$d/*.py zip/$$d/ ;\
+	done
+	for d in $(EJS_JS_FOLDERS) ; do \
+		mkdir -p zip/$$d ;\
+		cp -pPR $$d/*.js zip/$$d/ ;\
+	done
+	(cd zip && touch -t 200001010101 $(EJS_PY_FILES) $(EJS_JS_FILES))
+	(cd zip && zip -q ../yt-dlp.zip $(EJS_PY_FILES) $(EJS_JS_FILES))
+	cp -pP yt_dlp/__main__.py zip/
+	touch -t 200001010101 zip/__main__.py
+	(cd zip && zip -q ../yt-dlp.zip __main__.py)
+	echo '#!$(PYTHON)' > yt-dlp
+	cat yt-dlp.zip >> yt-dlp
+	rm yt-dlp.zip
+	chmod a+x yt-dlp
+	rm -rf zip
+
+.ejs-$(EJS_VERSION):
+	@echo Downloading yt-dlp-ejs
+	@echo "yt-dlp-ejs==$(EJS_VERSION) --hash $(EJS_WHEEL_HASH)" > .ejs-requirements.txt
+	$(PYTHON) -m pip download -d ./build --no-deps --require-hashes -r .ejs-requirements.txt
+	unzip -o build/$(EJS_WHEEL_NAME) "yt_dlp_ejs/*"
+	@touch .ejs-$(EJS_VERSION)
+
+current-ejs-version:
+	rm -rf .ejs-*
+	touch .ejs-$$($(PYTHON) -c 'import sys; sys.path = [""]; from yt_dlp_ejs import version; print(version)' 2>/dev/null)
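The `.ejs-$(EJS_VERSION)` target above leans on pip's `--require-hashes` mode to pin the downloaded wheel. A minimal sketch of the equivalent integrity check in Python, using the pinned values from the auto-generated variables (the wheel path assumes the Makefile's `./build` download directory):

```python
import hashlib

# Pinned values copied from the auto-generated EJS_ variables above
EXPECTED_SHA256 = 'a6e3548874db7c774388931752bb46c7f4642c044b2a189e56968f3d5ecab622'
WHEEL_PATH = 'build/yt_dlp_ejs-0.3.1-py3-none-any.whl'

with open(WHEEL_PATH, 'rb') as f:
    digest = hashlib.sha256(f.read()).hexdigest()
if digest != EXPECTED_SHA256:
    raise SystemExit(f'hash mismatch for {WHEEL_PATH}: got {digest}')
```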
README.md (454 changed lines)
@@ -4,9 +4,8 @@
 [](#readme)

 [](#installation "Installation")
-[](https://pypi.org/project/yt-dlp "PyPi")
-[](Collaborators.md#collaborators "Donate")
-[](https://matrix.to/#/#yt-dlp:matrix.org "Matrix")
+[](https://pypi.org/project/yt-dlp "PyPI")
+[](Maintainers.md#maintainers "Donate")
 [](https://discord.gg/H5MNcFW63r "Discord")
 [](supportedsites.md "Supported Sites")
 [](LICENSE "License")
@@ -45,6 +44,7 @@
 * [Post-processing Options](#post-processing-options)
 * [SponsorBlock Options](#sponsorblock-options)
 * [Extractor Options](#extractor-options)
+* [Preset Aliases](#preset-aliases)
 * [CONFIGURATION](#configuration)
 * [Configuration file encoding](#configuration-file-encoding)
 * [Authentication with netrc](#authentication-with-netrc)
@@ -81,7 +81,7 @@ # INSTALLATION
 [](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe)
 [](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp)
 [](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos)
 [](https://pypi.org/project/yt-dlp)
 [](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
 [](#release-files)
 [](https://github.com/yt-dlp/yt-dlp/releases)
@@ -98,21 +98,28 @@ #### Recommended
 File|Description
 :---|:---
 [yt-dlp](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp)|Platform-independent [zipimport](https://docs.python.org/3/library/zipimport.html) binary. Needs Python (recommended for **Linux/BSD**)
-[yt-dlp.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe)|Windows (Win7 SP1+) standalone x64 binary (recommended for **Windows**)
+[yt-dlp.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe)|Windows (Win8+) standalone x64 binary (recommended for **Windows**)
 [yt-dlp_macos](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos)|Universal MacOS (10.15+) standalone executable (recommended for **MacOS**)

 #### Alternatives

 File|Description
 :---|:---
-[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win7 SP1+) standalone x86 (32-bit) binary
-[yt-dlp_min.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_min.exe)|Windows (Win7 SP1+) standalone x64 binary built with `py2exe`<br/> ([Not recommended](#standalone-py2exe-builds-windows))
-[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux standalone x64 binary
-[yt-dlp_linux_armv7l](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l)|Linux standalone armv7l (32-bit) binary
-[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux standalone aarch64 (64-bit) binary
-[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows executable (no auto-update)
+[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux (glibc 2.17+) standalone x86_64 binary
+[yt-dlp_linux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux.zip)|Unpackaged Linux (glibc 2.17+) x86_64 executable (no auto-update)
+[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux (glibc 2.17+) standalone aarch64 binary
+[yt-dlp_linux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64.zip)|Unpackaged Linux (glibc 2.17+) aarch64 executable (no auto-update)
+[yt-dlp_linux_armv7l.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l.zip)|Unpackaged Linux (glibc 2.31+) armv7l executable (no auto-update)
+[yt-dlp_musllinux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux)|Linux (musl 1.2+) standalone x86_64 binary
+[yt-dlp_musllinux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux.zip)|Unpackaged Linux (musl 1.2+) x86_64 executable (no auto-update)
+[yt-dlp_musllinux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64)|Linux (musl 1.2+) standalone aarch64 binary
+[yt-dlp_musllinux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64.zip)|Unpackaged Linux (musl 1.2+) aarch64 executable (no auto-update)
+[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
+[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 (32-bit) executable (no auto-update)
+[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone ARM64 binary
+[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) ARM64 executable (no auto-update)
+[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
 [yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
-[yt-dlp_macos_legacy](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos_legacy)|MacOS (10.9+) standalone x64 executable

 #### Misc

@@ -131,6 +138,19 @@ #### Misc
 gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
 gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
 ```

+#### Licensing
+
+While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release files contain code from other projects with different licenses.
+
+Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html).
+
+The zipimport Unix executable (`yt-dlp`) contains [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) licensed code from [`meriyah`](https://github.com/meriyah/meriyah) and [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) licensed code from [`astring`](https://github.com/davidbonnet/astring).
+
+See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for more details.
+
+The git repository, the source tarball (`yt-dlp.tar.gz`), the PyPI source distribution and the PyPI built distribution (wheel) only contain code licensed under the [Unlicense](LICENSE).
+
 <!-- MANPAGE: END EXCLUDED SECTION -->

 **Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
@@ -169,18 +189,21 @@ # To update to nightly from stable executable/binary:
 yt-dlp --update-to nightly

 # To install nightly with pip:
-python3 -m pip install -U --pre "yt-dlp[default]"
+python -m pip install -U --pre "yt-dlp[default]"
 ```

+When running a yt-dlp version that is older than 90 days, you will see a warning message suggesting to update to the latest version.
+You can suppress this warning by adding `--no-update` to your command or configuration file.
+
 ## DEPENDENCIES
-Python versions 3.8+ (CPython and PyPy) are supported. Other versions and implementations may or may not work correctly.
+Python versions 3.10+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.

 <!-- Python 3.5+ uses VC++14 and it is already embedded in the binary created
 <!x-- https://www.microsoft.com/en-us/download/details.aspx?id=26999 --x>
-On windows, [Microsoft Visual C++ 2010 SP1 Redistributable Package (x86)](https://download.microsoft.com/download/1/6/5/165255E7-1014-4D0A-B094-B6A430A6BFFC/vcredist_x86.exe) is also necessary to run yt-dlp. You probably already have this, but if the executable throws an error due to missing `MSVCR100.dll` you need to install it manually.
+On Windows, [Microsoft Visual C++ 2010 SP1 Redistributable Package (x86)](https://download.microsoft.com/download/1/6/5/165255E7-1014-4D0A-B094-B6A430A6BFFC/vcredist_x86.exe) is also necessary to run yt-dlp. You probably already have this, but if the executable throws an error due to missing `MSVCR100.dll` you need to install it manually.
 -->

-While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly recommended
+While all the other dependencies are optional, `ffmpeg`, `ffprobe`, `yt-dlp-ejs` and a JavaScript runtime are highly recommended

 ### Strongly recommended

@@ -190,6 +213,10 @@ ### Strongly recommended

 **Important**: What you need is ffmpeg *binary*, **NOT** [the Python package of the same name](https://pypi.org/project/ffmpeg)

+* [**yt-dlp-ejs**](https://github.com/yt-dlp/ejs) - Required for deciphering YouTube n/sig values. Licensed under [Unlicense](https://github.com/yt-dlp/ejs/blob/main/LICENSE), bundles [MIT](https://github.com/davidbonnet/astring/blob/main/LICENSE) and [ISC](https://github.com/meriyah/meriyah/blob/main/LICENSE.md) components.
+
+    A JavaScript runtime like [**deno**](https://deno.land) (recommended), [**node.js**](https://nodejs.org), [**bun**](https://bun.sh), or [**QuickJS**](https://bellard.org/quickjs/) is also required to run yt-dlp-ejs. See [the wiki](https://github.com/yt-dlp/yt-dlp/wiki/EJS).
+
 ### Networking
 * [**certifi**](https://github.com/certifi/python-certifi)\* - Provides Mozilla's root certificate bundle. Licensed under [MPLv2](https://github.com/certifi/python-certifi/blob/master/LICENSE)
 * [**brotli**](https://github.com/google/brotli)\* or [**brotlicffi**](https://github.com/python-hyper/brotlicffi) - [Brotli](https://en.wikipedia.org/wiki/Brotli) content encoding support. Both licensed under MIT <sup>[1](https://github.com/google/brotli/blob/master/LICENSE) [2](https://github.com/python-hyper/brotlicffi/blob/master/LICENSE) </sup>
@@ -200,28 +227,26 @@ #### Impersonation

 The following provide support for impersonating browser requests. This may be required for some sites that employ TLS fingerprinting.

-* [**curl_cffi**](https://github.com/yifeikong/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lwthiker/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/yifeikong/curl_cffi/blob/main/LICENSE)
+* [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
   * Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
-  * Currently included in `yt-dlp.exe`, `yt-dlp_linux` and `yt-dlp_macos` builds
+  * Currently included in most builds *except* `yt-dlp` (Unix zipimport binary), `yt-dlp_x86` (Windows 32-bit) and `yt-dlp_musllinux_aarch64`

 ### Metadata

 * [**mutagen**](https://github.com/quodlibet/mutagen)\* - For `--embed-thumbnail` in certain formats. Licensed under [GPLv2+](https://github.com/quodlibet/mutagen/blob/master/COPYING)
 * [**AtomicParsley**](https://github.com/wez/atomicparsley) - For `--embed-thumbnail` in `mp4`/`m4a` files when `mutagen`/`ffmpeg` cannot. Licensed under [GPLv2+](https://github.com/wez/atomicparsley/blob/master/COPYING)
-* [**xattr**](https://github.com/xattr/xattr), [**pyxattr**](https://github.com/iustin/pyxattr) or [**setfattr**](http://savannah.nongnu.org/projects/attr) - For writing xattr metadata (`--xattr`) on **Mac** and **BSD**. Licensed under [MIT](https://github.com/xattr/xattr/blob/master/LICENSE.txt), [LGPL2.1](https://github.com/iustin/pyxattr/blob/master/COPYING) and [GPLv2+](http://git.savannah.nongnu.org/cgit/attr.git/tree/doc/COPYING) respectively
+* [**xattr**](https://github.com/xattr/xattr), [**pyxattr**](https://github.com/iustin/pyxattr) or [**setfattr**](http://savannah.nongnu.org/projects/attr) - For writing xattr metadata (`--xattrs`) on **Mac** and **BSD**. Licensed under [MIT](https://github.com/xattr/xattr/blob/master/LICENSE.txt), [LGPL2.1](https://github.com/iustin/pyxattr/blob/master/COPYING) and [GPLv2+](http://git.savannah.nongnu.org/cgit/attr.git/tree/doc/COPYING) respectively

 ### Misc

 * [**pycryptodomex**](https://github.com/Legrandin/pycryptodome)\* - For decrypting AES-128 HLS streams and various other data. Licensed under [BSD-2-Clause](https://github.com/Legrandin/pycryptodome/blob/master/LICENSE.rst)
-* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in extractors where javascript needs to be run. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD)
+* [**phantomjs**](https://github.com/ariya/phantomjs) - Used in some extractors where JavaScript needs to be run. No longer used for YouTube. To be deprecated in the near future. Licensed under [BSD-3-Clause](https://github.com/ariya/phantomjs/blob/master/LICENSE.BSD)
 * [**secretstorage**](https://github.com/mitya57/secretstorage)\* - For `--cookies-from-browser` to access the **Gnome** keyring while decrypting cookies of **Chromium**-based browsers on **Linux**. Licensed under [BSD-3-Clause](https://github.com/mitya57/secretstorage/blob/master/LICENSE)
 * Any external downloader that you want to use with `--downloader`

 ### Deprecated

-* [**avconv** and **avprobe**](https://www.libav.org) - Now **deprecated** alternative to ffmpeg. License [depends on the build](https://libav.org/legal)
-* [**sponskrub**](https://github.com/faissaloo/SponSkrub) - For using the now **deprecated** [sponskrub options](#sponskrub-options). Licensed under [GPLv3+](https://github.com/faissaloo/SponSkrub/blob/master/LICENCE.md)
 * [**rtmpdump**](http://rtmpdump.mplayerhq.hu) - For downloading `rtmp` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](http://rtmpdump.mplayerhq.hu)
 * [**mplayer**](http://mplayerhq.hu/design7/info.html) or [**mpv**](https://mpv.io) - For downloading `rstp`/`mms` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](https://github.com/mpv-player/mpv/blob/master/Copyright)

@@ -240,12 +265,12 @@ ### Standalone PyInstaller Builds
 You can run the following commands:

 ```
-python3 devscripts/install_deps.py --include pyinstaller
-python3 devscripts/make_lazy_extractors.py
-python3 -m bundle.pyinstaller
+python devscripts/install_deps.py --include-group pyinstaller
+python devscripts/make_lazy_extractors.py
+python -m bundle.pyinstaller
 ```

-On some systems, you may need to use `py` or `python` instead of `python3`.
+On some systems, you may need to use `py` or `python3` instead of `python`.

 `python -m bundle.pyinstaller` accepts any arguments that can be passed to `pyinstaller`, such as `--onefile/-F` or `--onedir/-D`, which is further [documented here](https://pyinstaller.org/en/stable/usage.html#what-to-generate).

@@ -254,31 +279,19 @@ ### Standalone PyInstaller Builds
 **Important**: Running `pyinstaller` directly **instead of** using `python -m bundle.pyinstaller` is **not** officially supported. This may or may not work correctly.

 ### Platform-independent Binary (UNIX)
-You will need the build tools `python` (3.8+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
+You will need the build tools `python` (3.10+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.

 After installing these, simply run `make`.

 You can also run `make yt-dlp` instead to compile only the binary without updating any of the additional files. (The build tools marked with **\*** are not needed for this)

-### Standalone Py2Exe Builds (Windows)
-
-While we provide the option to build with [py2exe](https://www.py2exe.org), it is recommended to build [using PyInstaller](#standalone-pyinstaller-builds) instead since the py2exe builds **cannot contain `pycryptodomex`/`certifi`/`requests` and need VC++14** on the target computer to run.
-
-If you wish to build it anyway, install Python (if it is not already installed) and you can run the following commands:
-
-```
-py devscripts/install_deps.py --include py2exe
-py devscripts/make_lazy_extractors.py
-py -m bundle.py2exe
-```
-
 ### Related scripts

 * **`devscripts/install_deps.py`** - Install dependencies for yt-dlp.
 * **`devscripts/update-version.py`** - Update the version number based on the current date.
 * **`devscripts/set-variant.py`** - Set the build variant of the executable.
 * **`devscripts/make_changelog.py`** - Create a markdown changelog using short commit messages and update `CONTRIBUTORS` file.
-* **`devscripts/make_lazy_extractors.py`** - Create lazy extractors. Running this before building the binaries (any variant) will improve their startup performance. Set the environment variable `YTDLP_NO_LAZY_EXTRACTORS=1` if you wish to forcefully disable lazy extractor loading.
+* **`devscripts/make_lazy_extractors.py`** - Create lazy extractors. Running this before building the binaries (any variant) will improve their startup performance. Set the environment variable `YTDLP_NO_LAZY_EXTRACTORS` to something nonempty to forcefully disable lazy extractor loading.

 Note: See their `--help` for more info.

@@ -290,7 +303,7 @@ # USAGE AND OPTIONS
 <!-- MANPAGE: BEGIN EXCLUDED SECTION -->
 yt-dlp [OPTIONS] [--] URL [URL...]

-`Ctrl+F` is your friend :D
+Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
 <!-- MANPAGE: END EXCLUDED SECTION -->

 <!-- Auto generated -->
@@ -313,7 +326,6 @@ ## General Options:
     playlist (default)
 --abort-on-error Abort downloading of further videos if an
     error occurs (Alias: --no-ignore-errors)
---dump-user-agent Display the current user-agent and exit
 --list-extractors List all supported extractors and exit
 --extractor-descriptions Output descriptions of all supported
     extractors and exit
@@ -348,13 +360,53 @@ ## General Options:
     containing directory ("-" for stdin). Can be
     used multiple times and inside other
     configuration files
---flat-playlist Do not extract the videos of a playlist,
-    only list them
+--plugin-dirs DIR Path to an additional directory to search
+    for plugins. This option can be used
+    multiple times to add multiple directories.
+    Use "default" to search the default plugin
+    directories (default)
+--no-plugin-dirs Clear plugin directories to search,
+    including defaults and those provided by
+    previous --plugin-dirs
+--js-runtimes RUNTIME[:PATH] Additional JavaScript runtime to enable,
+    with an optional location for the runtime
+    (either the path to the binary or its
+    containing directory). This option can be
+    used multiple times to enable multiple
+    runtimes. Supported runtimes are (in order
+    of priority, from highest to lowest): deno,
+    node, quickjs, bun. Only "deno" is enabled
+    by default. The highest priority runtime
+    that is both enabled and available will be
+    used. In order to use a lower priority
+    runtime when "deno" is available, --no-js-
+    runtimes needs to be passed before enabling
+    other runtimes
+--no-js-runtimes Clear JavaScript runtimes to enable,
+    including defaults and those provided by
+    previous --js-runtimes
+--remote-components COMPONENT Remote components to allow yt-dlp to fetch
+    when required. This option is currently not
+    needed if you are using an official
+    executable or have the requisite version of
+    the yt-dlp-ejs package installed. You can
+    use this option multiple times to allow
+    multiple components. Supported values:
+    ejs:npm (external JavaScript components from
+    npm), ejs:github (external JavaScript
+    components from yt-dlp-ejs GitHub). By
+    default, no remote components are allowed
+--no-remote-components Disallow fetching of all remote components,
+    including any previously allowed by
+    --remote-components or defaults.
+--flat-playlist Do not extract a playlist's URL result
+    entries; some entry metadata may be missing
+    and downloading may be bypassed
 --no-flat-playlist Fully extract the videos of a playlist
     (default)
 --live-from-start Download livestreams from the start.
-    Currently only supported for YouTube
-    (Experimental)
+    Currently experimental and only supported
+    for YouTube and Twitch
 --no-live-from-start Download livestreams from the current time
     (default)
 --wait-for-video MIN[-MAX] Wait for scheduled streams to become
@@ -380,17 +432,23 @@ ## General Options:
     an alias starts with a dash "-", it is
     prefixed with "--". Arguments are parsed
     according to the Python string formatting
-    mini-language. E.g. --alias get-audio,-X
-    "-S=aext:{0},abr -x --audio-format {0}"
-    creates options "--get-audio" and "-X" that
-    takes an argument (ARG0) and expands to
-    "-S=aext:ARG0,abr -x --audio-format ARG0".
-    All defined aliases are listed in the --help
+    mini-language. E.g. --alias get-audio,-X "-S
+    aext:{0},abr -x --audio-format {0}" creates
+    options "--get-audio" and "-X" that takes an
+    argument (ARG0) and expands to "-S
+    aext:ARG0,abr -x --audio-format ARG0". All
+    defined aliases are listed in the --help
     output. Alias options can trigger more
     aliases; so be careful to avoid defining
     recursive options. As a safety measure, each
     alias may be triggered a maximum of 100
     times. This option can be used multiple times
+-t, --preset-alias PRESET Applies a predefined set of options. e.g.
+    --preset-alias mp3. The following presets
+    are available: mp3, aac, mp4, mkv, sleep.
+    See the "Preset Aliases" section at the end
+    for more info. This option can be used
+    multiple times

 ## Network Options:
 --proxy URL Use the specified HTTP/HTTPS/SOCKS proxy. To
@@ -444,10 +502,10 @@ ## Video Selection:
     E.g. "--date today-2weeks" downloads only
     videos uploaded on the same day two weeks ago
 --datebefore DATE Download only videos uploaded on or before
-    this date. The date formats accepted is the
+    this date. The date formats accepted are the
     same as --date
 --dateafter DATE Download only videos uploaded on or after
-    this date. The date formats accepted is the
+    this date. The date formats accepted are the
     same as --date
 --match-filters FILTER Generic video filter. Any "OUTPUT TEMPLATE"
     field can be compared with a number or a
@@ -459,17 +517,17 @@ ## Video Selection:
|
|||||||
conditions. Use a "\" to escape "&" or
|
conditions. Use a "\" to escape "&" or
|
||||||
quotes if needed. If used multiple times,
|
quotes if needed. If used multiple times,
|
||||||
the filter matches if at least one of the
|
the filter matches if at least one of the
|
||||||
conditions is met. E.g. --match-filter
|
conditions is met. E.g. --match-filters
|
||||||
!is_live --match-filter "like_count>?100 &
|
!is_live --match-filters "like_count>?100 &
|
||||||
description~='(?i)\bcats \& dogs\b'" matches
|
description~='(?i)\bcats \& dogs\b'" matches
|
||||||
only videos that are not live OR those that
|
only videos that are not live OR those that
|
||||||
have a like count more than 100 (or the like
|
have a like count more than 100 (or the like
|
||||||
field is not available) and also has a
|
field is not available) and also has a
|
||||||
description that contains the phrase "cats &
|
description that contains the phrase "cats &
|
||||||
dogs" (caseless). Use "--match-filter -" to
|
dogs" (caseless). Use "--match-filters -" to
|
||||||
interactively ask whether to download each
|
interactively ask whether to download each
|
||||||
video
|
video
|
||||||
--no-match-filters Do not use any --match-filter (default)
|
--no-match-filters Do not use any --match-filters (default)
|
||||||
--break-match-filters FILTER Same as "--match-filters" but stops the
|
--break-match-filters FILTER Same as "--match-filters" but stops the
|
||||||
download process when a video is rejected
|
download process when a video is rejected
|
||||||
--no-break-match-filters Do not use any --break-match-filters (default)
|
--no-break-match-filters Do not use any --break-match-filters (default)
|
||||||
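For illustration, the pluralized `--match-filters` spelling adopted above is used exactly like the old singular form; a sketch, with `URL` as a placeholder:

```
# Skip livestreams; also require >100 likes unless like_count is unavailable
$ yt-dlp --match-filters !is_live --match-filters "like_count>?100" "URL"
```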
@@ -485,12 +543,13 @@ ## Video Selection:
     --no-download-archive           Do not use archive file (default)
     --max-downloads NUMBER          Abort after downloading NUMBER files
     --break-on-existing             Stop the download process when encountering
-                                    a file that is in the archive
+                                    a file that is in the archive supplied with
+                                    the --download-archive option
     --no-break-on-existing          Do not stop the download process when
                                     encountering a file that is in the archive
                                     (default)
     --break-per-input               Alters --max-downloads, --break-on-existing,
-                                    --break-match-filter, and autonumber to
+                                    --break-match-filters, and autonumber to
                                     reset per input URL
     --no-break-per-input            --break-on-existing and similar options
                                     terminates the entire download queue
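The clarified wording above ties `--break-on-existing` to the archive file; a minimal sketch, assuming a placeholder archive filename:

```
# Stop the run at the first video already recorded in archive.txt
$ yt-dlp --download-archive archive.txt --break-on-existing "PLAYLIST_URL"
```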
@@ -548,8 +607,6 @@ ## Download Options:
                                     --playlist-random and --playlist-reverse
     --no-lazy-playlist              Process videos in the playlist only after
                                     the entire playlist is parsed (default)
-    --xattr-set-filesize            Set file xattribute ytdl.filesize with
-                                    expected file size
     --hls-use-mpegts                Use the mpegts container for HLS videos;
                                     allowing some players to play the video
                                     while downloading, and reducing the chance
@@ -573,9 +630,9 @@ ## Download Options:
                                     use (optionally) prefixed by the protocols
                                     (http, ftp, m3u8, dash, rstp, rtmp, mms) to
                                     use it for. Currently supports native,
-                                    aria2c, avconv, axel, curl, ffmpeg, httpie,
-                                    wget. You can use this option multiple times
-                                    to set different downloaders for different
+                                    aria2c, axel, curl, ffmpeg, httpie, wget.
+                                    You can use this option multiple times to
+                                    set different downloaders for different
                                     protocols. E.g. --downloader aria2c
                                     --downloader "dash,m3u8:native" will use
                                     aria2c for http/ftp downloads, and the
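The per-protocol syntax referenced in this hunk looks like the following; a sketch taken from the description itself, with `URL` as a placeholder:

```
# aria2c for plain http/ftp downloads, the native downloader for DASH and HLS
$ yt-dlp --downloader aria2c --downloader "dash,m3u8:native" "URL"
```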
@@ -617,8 +674,7 @@ ## Filesystem Options:
     --no-restrict-filenames         Allow Unicode characters, "&" and spaces in
                                     filenames (default)
     --windows-filenames             Force filenames to be Windows-compatible
-    --no-windows-filenames          Make filenames Windows-compatible only if
-                                    using Windows (default)
+    --no-windows-filenames          Sanitize filenames only minimally
     --trim-filenames LENGTH         Limit the filename length (excluding
                                     extension) to the specified number of
                                     characters
@@ -637,9 +693,9 @@ ## Filesystem Options:
     --no-part                       Do not use .part files - write directly into
                                     output file
     --mtime                         Use the Last-modified header to set the file
-                                    modification time (default)
+                                    modification time
     --no-mtime                      Do not use the Last-modified header to set
-                                    the file modification time
+                                    the file modification time (default)
     --write-description             Write video description to a .description file
     --no-write-description          Do not write video description (default)
     --write-info-json               Write video metadata to a .info.json file
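Note that this hunk flips the default from `--mtime` to `--no-mtime`, so the old behavior now has to be requested explicitly; a sketch, with `URL` as a placeholder:

```
# Keep stamping files with the server's Last-modified time
$ yt-dlp --mtime "URL"
```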
@@ -732,16 +788,16 @@ ## Verbosity and Simulation Options:
                                     used. This option can be used multiple times
     --print-to-file [WHEN:]TEMPLATE FILE
                                     Append given template to the file. The
-                                    values of WHEN and TEMPLATE are same as that
-                                    of --print. FILE uses the same syntax as the
-                                    output template. This option can be used
-                                    multiple times
+                                    values of WHEN and TEMPLATE are the same as
+                                    that of --print. FILE uses the same syntax
+                                    as the output template. This option can be
+                                    used multiple times
     -j, --dump-json                 Quiet, but print JSON information for each
                                     video. Simulate unless --no-simulate is
                                     used. See "OUTPUT TEMPLATE" for a
                                     description of available keys
     -J, --dump-single-json          Quiet, but print JSON information for each
-                                    url or infojson passed. Simulate unless
+                                    URL or infojson passed. Simulate unless
                                     --no-simulate is used. If the URL refers to
                                     a playlist, the whole playlist information
                                     is dumped in a single line
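A minimal sketch of `--print-to-file` as described above, assuming a placeholder output filename:

```
# Append each video's title to titles.txt (simulates by default, like --print)
$ yt-dlp --print-to-file "%(title)s" titles.txt "PLAYLIST_URL"
```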
@@ -816,9 +872,9 @@ ## Video Format Options:
     --no-audio-multistreams         Only one audio stream is downloaded for each
                                     output file (default)
     --prefer-free-formats           Prefer video formats with free containers
-                                    over non-free ones of same quality. Use with
-                                    "-S ext" to strictly prefer free containers
-                                    irrespective of quality
+                                    over non-free ones of the same quality. Use
+                                    with "-S ext" to strictly prefer free
+                                    containers irrespective of quality
     --no-prefer-free-formats        Don't give any special preference to free
                                     containers (default)
     --check-formats                 Make sure formats are selected only from
@@ -843,15 +899,17 @@ ## Subtitle Options:
                                     (default) (Alias: --no-write-automatic-subs)
     --list-subs                     List available subtitles of each video.
                                     Simulate unless --no-simulate is used
-    --sub-format FORMAT             Subtitle format; accepts formats preference,
-                                    e.g. "srt" or "ass/srt/best"
+    --sub-format FORMAT             Subtitle format; accepts formats preference
+                                    separated by "/", e.g. "srt" or "ass/srt/best"
     --sub-langs LANGS               Languages of the subtitles to download (can
                                     be regex) or "all" separated by commas, e.g.
-                                    --sub-langs "en.*,ja". You can prefix the
-                                    language code with a "-" to exclude it from
-                                    the requested languages, e.g. --sub-langs
-                                    all,-live_chat. Use --list-subs for a list
-                                    of available language tags
+                                    --sub-langs "en.*,ja" (where "en.*" is a
+                                    regex pattern that matches "en" followed by
+                                    0 or more of any character). You can prefix
+                                    the language code with a "-" to exclude it
+                                    from the requested languages, e.g. --sub-
+                                    langs all,-live_chat. Use --list-subs for a
+                                    list of available language tags
 
 ## Authentication Options:
     -u, --username USERNAME         Login with this account ID
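Combining the two subtitle options clarified above; a sketch, with `URL` as a placeholder:

```
# English-variant (regex "en.*") and Japanese subs, preferring ass, then srt
$ yt-dlp --write-subs --sub-langs "en.*,ja" --sub-format "ass/srt/best" "URL"
```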
@@ -899,9 +957,9 @@ ## Post-Processing Options:
                                     necessary (currently supported: avi, flv,
                                     gif, mkv, mov, mp4, webm, aac, aiff, alac,
                                     flac, m4a, mka, mp3, ogg, opus, vorbis,
-                                    wav). If target container does not support
-                                    the video/audio codec, remuxing will fail.
-                                    You can specify multiple rules; e.g.
+                                    wav). If the target container does not
+                                    support the video/audio codec, remuxing will
+                                    fail. You can specify multiple rules; e.g.
                                     "aac>m4a/mov>mp4/mkv" will remux aac to m4a,
                                     mov to mp4 and anything else to mkv
     --recode-video FORMAT           Re-encode the video into another format if
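The multiple-rule syntax quoted in this hunk, as a runnable sketch with `URL` as a placeholder:

```
# Remux aac into m4a, mov into mp4, and anything else into mkv
$ yt-dlp --remux-video "aac>m4a/mov>mp4/mkv" "URL"
```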
@@ -969,29 +1027,29 @@ ## Post-Processing Options:
                                     are the same as that of --use-postprocessor
                                     (default: pre_process)
     --xattrs                        Write metadata to the video file's xattrs
-                                    (using dublin core and xdg standards)
+                                    (using Dublin Core and XDG standards)
     --concat-playlist POLICY        Concatenate videos in a playlist. One of
                                     "never", "always", or "multi_video"
                                     (default; only when the videos form a single
-                                    show). All the video files must have same
-                                    codecs and number of streams to be
-                                    concatable. The "pl_video:" prefix can be
+                                    show). All the video files must have the
+                                    same codecs and number of streams to be
+                                    concatenable. The "pl_video:" prefix can be
                                     used with "--paths" and "--output" to set
                                     the output filename for the concatenated
                                     files. See "OUTPUT TEMPLATE" for details
     --fixup POLICY                  Automatically correct known faults of the
                                     file. One of never (do nothing), warn (only
                                     emit a warning), detect_or_warn (the
-                                    default; fix file if we can, warn
-                                    otherwise), force (try fixing even if file
-                                    already exists)
+                                    default; fix the file if we can, warn
+                                    otherwise), force (try fixing even if the
+                                    file already exists)
     --ffmpeg-location PATH          Location of the ffmpeg binary; either the
                                     path to the binary or its containing directory
     --exec [WHEN:]CMD               Execute a command, optionally prefixed with
                                     when to execute it, separated by a ":".
                                     Supported values of "WHEN" are the same as
                                     that of --use-postprocessor (default:
-                                    after_move). Same syntax as the output
+                                    after_move). The same syntax as the output
                                     template can be used to pass any field as
                                     arguments to the command. If no fields are
                                     passed, %(filepath,_filename|)q is appended
@@ -999,12 +1057,16 @@ ## Post-Processing Options:
                                     be used multiple times
     --no-exec                       Remove any previously defined --exec
     --convert-subs FORMAT           Convert the subtitles to another format
-                                    (currently supported: ass, lrc, srt, vtt)
-                                    (Alias: --convert-subtitles)
+                                    (currently supported: ass, lrc, srt, vtt).
+                                    Use "--convert-subs none" to disable
+                                    conversion (default) (Alias: --convert-
+                                    subtitles)
     --convert-thumbnails FORMAT     Convert the thumbnails to another format
                                     (currently supported: jpg, png, webp). You
                                     can specify multiple rules using similar
-                                    syntax as --remux-video
+                                    syntax as "--remux-video". Use "--convert-
+                                    thumbnails none" to disable conversion
+                                    (default)
     --split-chapters                Split video into multiple files based on
                                     internal chapters. The "chapter:" prefix can
                                     be used with "--paths" and "--output" to set
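The new `none` values documented above can be mixed; a sketch, with `URL` as a placeholder:

```
# Convert subtitles to srt but leave thumbnails in their original format
$ yt-dlp --write-subs --convert-subs srt --convert-thumbnails none "URL"
```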
@@ -1025,7 +1087,7 @@ ## Post-Processing Options:
     --no-force-keyframes-at-cuts    Do not force keyframes around the chapters
                                     when cutting/splitting (default)
     --use-postprocessor NAME[:ARGS]
-                                    The (case sensitive) name of plugin
+                                    The (case-sensitive) name of plugin
                                     postprocessors to be enabled, and
                                     (optionally) arguments to be passed to it,
                                     separated by a colon ":". ARGS are a
@@ -1038,8 +1100,8 @@ ## Post-Processing Options:
                                     --print/--output), "before_dl" (before each
                                     video download), "post_process" (after each
                                     video download; default), "after_move"
-                                    (after moving video file to its final
-                                    locations), "after_video" (after downloading
+                                    (after moving the video file to its final
+                                    location), "after_video" (after downloading
                                     and processing all formats of a video), or
                                     "playlist" (at end of playlist). This option
                                     can be used multiple times to add different
@@ -1054,11 +1116,12 @@ ## SponsorBlock Options:
                                     for, separated by commas. Available
                                     categories are sponsor, intro, outro,
                                     selfpromo, preview, filler, interaction,
-                                    music_offtopic, poi_highlight, chapter, all
-                                    and default (=all). You can prefix the
-                                    category with a "-" to exclude it. See [1]
-                                    for description of the categories. E.g.
-                                    --sponsorblock-mark all,-preview
+                                    music_offtopic, hook, poi_highlight,
+                                    chapter, all and default (=all). You can
+                                    prefix the category with a "-" to exclude
+                                    it. See [1] for descriptions of the
+                                    categories. E.g. --sponsorblock-mark
+                                    all,-preview
                                     [1] https://wiki.sponsor.ajay.app/w/Segment_Categories
     --sponsorblock-remove CATS      SponsorBlock categories to be removed from
                                     the video file, separated by commas. If a
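The re-wrapped example from this hunk, written out; `URL` is a placeholder:

```
# Mark chapters for every SponsorBlock category except previews/recaps
$ yt-dlp --sponsorblock-mark all,-preview "URL"
```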
@@ -1089,7 +1152,7 @@ ## Extractor Options:
                                     (Alias: --no-allow-dynamic-mpd)
     --hls-split-discontinuity       Split HLS playlists to different formats at
                                     discontinuities such as ad breaks
-    --no-hls-split-discontinuity    Do not split HLS playlists to different
+    --no-hls-split-discontinuity    Do not split HLS playlists into different
                                     formats at discontinuities such as ad breaks
                                     (default)
     --extractor-args IE_KEY:ARGS    Pass ARGS arguments to the IE_KEY extractor.
@@ -1097,12 +1160,33 @@ ## Extractor Options:
                                     can use this option multiple times to give
                                     arguments for different extractors
+
+## Preset Aliases:
+Predefined aliases for convenience and ease of use. Note that future
+versions of yt-dlp may add or adjust presets, but the existing preset
+names will not be changed or removed
+
+    -t mp3                          -f 'ba[acodec^=mp3]/ba/b' -x --audio-format
+                                    mp3
+
+    -t aac                          -f
+                                    'ba[acodec^=aac]/ba[acodec^=mp4a.40.]/ba/b'
+                                    -x --audio-format aac
+
+    -t mp4                          --merge-output-format mp4 --remux-video mp4
+                                    -S vcodec:h264,lang,quality,res,fps,hdr:12,a
+                                    codec:aac
+
+    -t mkv                          --merge-output-format mkv --remux-video mkv
+
+    -t sleep                        --sleep-subtitles 5 --sleep-requests 0.75
+                                    --sleep-interval 10 --max-sleep-interval 20
 
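Since each preset is shorthand for the expanded options listed above, the following two invocations should be equivalent; a sketch, with `URL` as a placeholder:

```
$ yt-dlp -t mp3 "URL"
$ yt-dlp -f 'ba[acodec^=mp3]/ba/b' -x --audio-format mp3 "URL"
```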
 # CONFIGURATION
 
-You can configure yt-dlp by placing any supported command line option to a configuration file. The configuration is loaded from the following locations:
+You can configure yt-dlp by placing any supported command line option in a configuration file. The configuration is loaded from the following locations:
 
 1. **Main Configuration**:
-    * The file given to `--config-location`
+    * The file given to `--config-locations`
 1. **Portable Configuration**: (Recommended for portable installations)
     * If using a binary, `yt-dlp.conf` in the same directory as the binary
     * If running from source-code, `yt-dlp.conf` in the parent directory of `yt_dlp`
@@ -1127,15 +1211,15 @@ # CONFIGURATION
     * `/etc/yt-dlp/config`
     * `/etc/yt-dlp/config.txt`
 
-E.g. with the following configuration file, yt-dlp will always extract the audio, not copy the mtime, use a proxy and save all videos under `YouTube` directory in your home directory:
+E.g. with the following configuration file, yt-dlp will always extract the audio, copy the mtime, use a proxy and save all videos under `YouTube` directory in your home directory:
 ```
 # Lines starting with # are comments
 
 # Always extract audio
 -x
 
-# Do not copy the mtime
---no-mtime
+# Copy the mtime
+--mtime
 
 # Use this proxy
 --proxy 127.0.0.1:3128
@@ -1144,7 +1228,7 @@ # Save all videos under YouTube directory in your home directory
 -o ~/YouTube/%(title)s.%(ext)s
 ```
 
-**Note**: Options in configuration file are just the same options aka switches used in regular command line calls; thus there **must be no whitespace** after `-` or `--`, e.g. `-o` or `--proxy` but not `- o` or `-- proxy`. They must also be quoted when necessary, as if it were a UNIX shell.
+**Note**: Options in a configuration file are just the same options aka switches used in regular command line calls; thus there **must be no whitespace** after `-` or `--`, e.g. `-o` or `--proxy` but not `- o` or `-- proxy`. They must also be quoted when necessary, as if it were a UNIX shell.
 
 You can use `--ignore-config` if you want to disable all configuration files for a particular yt-dlp run. If `--ignore-config` is found inside any configuration file, no further configuration will be loaded. For example, having the option in the portable configuration file prevents loading of home, user, and system configurations. Additionally, (for backward compatibility) if `--ignore-config` is found inside the system configuration file, the user configuration is not loaded.
 
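Under the pluralized `--config-locations` spelling adopted above, an extra configuration file can be supplied per run; a sketch, assuming a placeholder path:

```
$ yt-dlp --config-locations ~/my-yt-dlp.conf "URL"
```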
@@ -1178,13 +1262,13 @@ ### Authentication with netrc
 
 E.g. To use an encrypted `.netrc` file stored as `.authinfo.gpg`
 ```
-yt-dlp --netrc-cmd 'gpg --decrypt ~/.authinfo.gpg' https://www.youtube.com/watch?v=BaW_jenozKc
+yt-dlp --netrc-cmd 'gpg --decrypt ~/.authinfo.gpg' 'https://www.youtube.com/watch?v=BaW_jenozKc'
 ```
 
 
 ### Notes about environment variables
 * Environment variables are normally specified as `${VARIABLE}`/`$VARIABLE` on UNIX and `%VARIABLE%` on Windows; but is always shown as `${VARIABLE}` in this documentation
-* yt-dlp also allow using UNIX-style variables on Windows for path-like options; e.g. `--output`, `--config-location`
+* yt-dlp also allows using UNIX-style variables on Windows for path-like options; e.g. `--output`, `--config-locations`
 * If unset, `${XDG_CONFIG_HOME}` defaults to `~/.config` and `${XDG_CACHE_HOME}` to `~/.cache`
 * On Windows, `~` points to `${HOME}` if present; or, `${USERPROFILE}` or `${HOMEDRIVE}${HOMEPATH}` otherwise
 * On Windows, `${USERPROFILE}` generally points to `C:\Users\<user name>` and `${APPDATA}` to `${USERPROFILE}\AppData\Roaming`
@@ -1265,7 +1349,7 @@ # OUTPUT TEMPLATE
 - `like_count` (numeric): Number of positive ratings of the video
 - `dislike_count` (numeric): Number of negative ratings of the video
 - `repost_count` (numeric): Number of reposts of the video
-- `average_rating` (numeric): Average rating give by users, the scale used depends on the webpage
+- `average_rating` (numeric): Average rating given by users, the scale used depends on the webpage
 - `comment_count` (numeric): Number of comments on the video (For some extractors, comments are only downloaded at the end, and so this field cannot be used)
 - `age_limit` (numeric): Age restriction for the video (years)
 - `live_status` (string): One of "not_live", "is_live", "is_upcoming", "was_live", "post_live" (was live, but VOD is not yet processed)
@@ -1292,10 +1376,11 @@ # OUTPUT TEMPLATE
 - `playlist_uploader_id` (string): Nickname or id of the playlist uploader
 - `playlist_channel` (string): Display name of the channel that uploaded the playlist
 - `playlist_channel_id` (string): Identifier of the channel that uploaded the playlist
+- `playlist_webpage_url` (string): URL of the playlist webpage
 - `webpage_url` (string): A URL to the video webpage which, if given to yt-dlp, should yield the same result again
 - `webpage_url_basename` (string): The basename of the webpage URL
 - `webpage_url_domain` (string): The domain of the webpage URL
-- `original_url` (string): The URL given by the user (or same as `webpage_url` for playlist entries)
+- `original_url` (string): The URL given by the user (or the same as `webpage_url` for playlist entries)
 - `categories` (list): List of categories the video belongs to
 - `tags` (list): List of tags assigned to the video
 - `cast` (list): List of cast members
@@ -1372,7 +1457,7 @@ # OUTPUT TEMPLATE
 
 **Tip**: Look at the `-j` output to identify which fields are available for the particular URL
 
-For numeric sequences you can use [numeric related formatting](https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting); e.g. `%(view_count)05d` will result in a string with view count padded with zeros up to 5 characters, like in `00042`.
+For numeric sequences, you can use [numeric related formatting](https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting); e.g. `%(view_count)05d` will result in a string with view count padded with zeros up to 5 characters, like in `00042`.
 
 Output templates can also contain arbitrary hierarchical path, e.g. `-o "%(playlist)s/%(playlist_index)s - %(title)s.%(ext)s"` which will result in downloading each video in a directory corresponding to this path template. Any missing directory will be automatically created for you.
 
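The `%(view_count)05d` formatting described above, placed in an output template; a sketch, with `URL` as a placeholder:

```
# Produces names like "00042 - Some Title.mp4"
$ yt-dlp -o "%(view_count)05d - %(title)s.%(ext)s" "URL"
```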
@@ -1414,7 +1499,7 @@ # Download entire series season keeping each series and each season in separate
 
 # Download video as "C:\MyVideos\uploader\title.ext", subtitles as "C:\MyVideos\subs\uploader\title.ext"
 # and put all temporary files in "C:\MyVideos\tmp"
-$ yt-dlp -P "C:/MyVideos" -P "temp:tmp" -P "subtitle:subs" -o "%(uploader)s/%(title)s.%(ext)s" BaW_jenoz --write-subs
+$ yt-dlp -P "C:/MyVideos" -P "temp:tmp" -P "subtitle:subs" -o "%(uploader)s/%(title)s.%(ext)s" BaW_jenozKc --write-subs
 
 # Download video as "C:\MyVideos\uploader\title.ext" and subtitles as "C:\MyVideos\uploader\subs\title.ext"
 $ yt-dlp -P "C:/MyVideos" -o "%(uploader)s/%(title)s.%(ext)s" -o "subtitle:%(uploader)s/subs/%(title)s.%(ext)s" BaW_jenozKc --write-subs
@@ -1524,7 +1609,7 @@ ## Sorting Formats
 - `hasvid`: Gives priority to formats that have a video stream
 - `hasaud`: Gives priority to formats that have an audio stream
 - `ie_pref`: The format preference
-- `lang`: The language preference
+- `lang`: The language preference as determined by the extractor (e.g. original language preferred over audio description)
 - `quality`: The quality of the format
 - `source`: The preference of the source
 - `proto`: Protocol used for download (`https`/`ftps` > `http`/`ftp` > `m3u8_native`/`m3u8` > `http_dash_segments`> `websocket_frag` > `mms`/`rtsp` > `f4f`/`f4m`)
@@ -1553,9 +1638,9 @@ ## Sorting Formats
 
 All fields, unless specified otherwise, are sorted in descending order. To reverse this, prefix the field with a `+`. E.g. `+res` prefers format with the smallest resolution. Additionally, you can suffix a preferred value for the fields, separated by a `:`. E.g. `res:720` prefers larger videos, but no larger than 720p and the smallest video if there are no videos less than 720p. For `codec` and `ext`, you can provide two preferred values, the first for video and the second for audio. E.g. `+codec:avc:m4a` (equivalent to `+vcodec:avc,+acodec:m4a`) sets the video codec preference to `h264` > `h265` > `vp9` > `vp9.2` > `av01` > `vp8` > `h263` > `theora` and audio codec preference to `mp4a` > `aac` > `vorbis` > `opus` > `mp3` > `ac3` > `dts`. You can also make the sorting prefer the nearest values to the provided by using `~` as the delimiter. E.g. `filesize~1G` prefers the format with filesize closest to 1 GiB.
 
-The fields `hasvid` and `ie_pref` are always given highest priority in sorting, irrespective of the user-defined order. This behavior can be changed by using `--format-sort-force`. Apart from these, the default order used is: `lang,quality,res,fps,hdr:12,vcodec:vp9.2,channels,acodec,size,br,asr,proto,ext,hasaud,source,id`. The extractors may override this default order, but they cannot override the user-provided order.
+The fields `hasvid` and `ie_pref` are always given highest priority in sorting, irrespective of the user-defined order. This behavior can be changed by using `--format-sort-force`. Apart from these, the default order used is: `lang,quality,res,fps,hdr:12,vcodec,channels,acodec,size,br,asr,proto,ext,hasaud,source,id`. The extractors may override this default order, but they cannot override the user-provided order.
 
-Note that the default has `vcodec:vp9.2`; i.e. `av1` is not preferred. Similarly, the default for hdr is `hdr:12`; i.e. Dolby Vision is not preferred. These choices are made since DV and AV1 formats are not yet fully compatible with most devices. This may be changed in the future as more devices become capable of smoothly playing back these formats.
+Note that the default for hdr is `hdr:12`; i.e. Dolby Vision is not preferred. This choice was made since DV formats are not yet fully compatible with most devices. This may be changed in the future.
 
 If your format selector is `worst`, the last item is selected after sorting. This means it will select the format that is worst in all respects. Most of the time, what you actually want is the video with the smallest filesize instead. So it is generally better to use `-f best -S +size,+br,+res,+fps`.
 
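The `~` proximity delimiter mentioned above, as a command line; a sketch, with `URL` as a placeholder:

```
# Prefer the format whose filesize is closest to 1 GiB
$ yt-dlp -S "filesize~1G" "URL"
```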
@@ -1632,11 +1717,11 @@ # Download the best video (that also has audio) but no bigger than 50 MB,
 # or the worst video (that also has audio) if there is no video under 50 MB
 $ yt-dlp -f "b[filesize<50M] / w"
 
-# Download largest video (that also has audio) but no bigger than 50 MB,
+# Download the largest video (that also has audio) but no bigger than 50 MB,
 # or the smallest video (that also has audio) if there is no video under 50 MB
 $ yt-dlp -f "b" -S "filesize:50M"
 
-# Download best video (that also has audio) that is closest in size to 50 MB
+# Download the best video (that also has audio) that is closest in size to 50 MB
 $ yt-dlp -f "b" -S "filesize~50M"
 
 
@@ -1758,25 +1843,40 @@ # Replace all spaces and "_" in title and uploader with a `-`
 
 # EXTRACTOR ARGUMENTS
 
-Some extractors accept additional arguments which can be passed using `--extractor-args KEY:ARGS`. `ARGS` is a `;` (semicolon) separated string of `ARG=VAL1,VAL2`. E.g. `--extractor-args "youtube:player-client=android_embedded,web;formats=incomplete" --extractor-args "funimation:version=uncut"`
+Some extractors accept additional arguments which can be passed using `--extractor-args KEY:ARGS`. `ARGS` is a `;` (semicolon) separated string of `ARG=VAL1,VAL2`. E.g. `--extractor-args "youtube:player-client=tv,mweb;formats=incomplete" --extractor-args "twitter:api=syndication"`
 
 Note: In CLI, `ARG` can use `-` instead of `_`; e.g. `youtube:player-client"` becomes `youtube:player_client"`
 
 The following extractors use this feature:
 
 #### youtube
-* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube.py](https://github.com/yt-dlp/yt-dlp/blob/c26f9b991a0681fd3ea548d535919cec1fbbd430/yt_dlp/extractor/youtube.py#L381-L390) for list of supported content language codes
+* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube/_base.py](https://github.com/yt-dlp/yt-dlp/blob/415b4c9f955b1a0391204bd24a7132590e7b3bdb/yt_dlp/extractor/youtube/_base.py#L402-L409) for the list of supported content language codes
 * `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively
-* `player_client`: Clients to extract video data from. The main clients are `web`, `ios` and `android`, with variants `_music`, `_embedded`, `_embedscreen`, `_creator` (e.g. `web_embedded`); and `mediaconnect`, `mweb`, `mweb_embedscreen` and `tv_embedded` (agegate bypass) with no variants. By default, `ios,web` is used, but `tv_embedded` and `creator` variants are added as required for age-gated videos. Similarly, the music variants are added for `music.youtube.com` urls. The `android` clients will always be given lowest priority since their formats are broken. You can use `all` to use all the clients, and `default` for the default clients.
-* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause some issues. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) for more details
+* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_sdkless`, `android_vr`, `tv`, `tv_simply`, `tv_downgraded`, and `tv_embedded`. By default, `tv,android_sdkless,web` is used. If no JavaScript runtime is available, then `android_sdkless,web_safari,web` is used. If logged-in cookies are passed to yt-dlp, then `tv_downgraded,web_safari,web` is used for free accounts and `tv_downgraded,web_creator,web` is used for premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
+* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details
+* `webpage_skip`: Skip extraction of embedded webpage data. One or both of `player_response`, `initial_data`. These options are for testing purposes and don't skip any network requests
 * `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp.
+* `player_js_variant`: The player javascript variant to use for n/sig deciphering. The known variants are: `main`, `tcc`, `tce`, `es5`, `es6`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
+* `player_js_version`: The player javascript version to use for n/sig deciphering, in the format of `signature_timestamp@hash` (e.g. `20348@0004de42`). The default is to use what is prescribed by the site, and can be selected with `actual`
 * `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
 * `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
     * E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total
-* `formats`: Change the types of formats to return. `dashy` (convert HTTP to DASH), `duplicate` (identical content but different URLs or protocol; includes `dashy`), `incomplete` (cannot be downloaded completely - live dash and post-live m3u8)
+* `formats`: Change the types of formats to return. `dashy` (convert HTTP to DASH), `duplicate` (identical content but different URLs or protocol; includes `dashy`), `incomplete` (cannot be downloaded completely - live dash and post-live m3u8), `missing_pot` (include formats that require a PO Token but are missing one)
 * `innertube_host`: Innertube API host to use for all API requests; e.g. `studio.youtube.com`, `youtubei.googleapis.com`. Note that cookies exported from one subdomain will not work on others
-* `innertube_key`: Innertube API key to use for all API requests
+* `innertube_key`: Innertube API key to use for all API requests. By default, no API key is used
 * `raise_incomplete_data`: `Incomplete Data Received` raises an error instead of reporting a warning
+* `data_sync_id`: Overrides the account Data Sync ID used in Innertube API requests. This may be needed if you are using an account with `youtube:player_skip=webpage,configs` or `youtubetab:skip=webpage`
+* `visitor_data`: Overrides the Visitor Data used in Innertube API requests. This should be used with `player_skip=webpage,configs` and without cookies. Note: this may have adverse effects if used improperly. If a session from a browser is wanted, you should pass cookies instead (which contain the Visitor ID)
+* `po_token`: Proof of Origin (PO) Token(s) to use. Comma-separated list of PO Tokens in the format `CLIENT.CONTEXT+PO_TOKEN`, e.g. `youtube:po_token=web.gvs+XXX,web.player=XXX,web_safari.gvs+YYY`. Context can be any of `gvs` (Google Video Server URLs), `player` (Innertube player request) or `subs` (Subtitles)
+* `pot_trace`: Enable debug logging for PO Token fetching. Either `true` or `false` (default)
+* `fetch_pot`: Policy to use for fetching a PO Token from providers. One of `always` (always try fetch a PO Token regardless if the client requires one for the given context), `never` (never fetch a PO Token), or `auto` (default; only fetch a PO Token if the client requires one for the given context)
+* `jsc_trace`: Enable debug logging for JS Challenge fetching. Either `true` or `false` (default)
+
+#### youtube-ejs
+* `jitless`: Run supported Javascript engines in JIT-less mode. Supported runtimes are `deno`, `node` and `bun`. Provides better security at the cost of performance/speed. Do note that `node` and `bun` are still considered insecure. Either `true` or `false` (default)
+
+#### youtubepot-webpo
+* `bind_to_visitor_id`: Whether to use the Visitor ID instead of Visitor Data for caching WebPO tokens. Either `true` (default) or `false`
 
 #### youtubetab (YouTube playlists, channels, feeds, etc.)
 * `skip`: One or more of `webpage` (skip initial webpage download), `authcheck` (allow the download of playlists requiring authentication when no initial webpage is downloaded. This may cause unwanted behavior, see [#1122](https://github.com/yt-dlp/yt-dlp/pull/1122) for more details)
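The exclusion syntax documented for `player_client` above, written out; a sketch, with `URL` as a placeholder:

```
# Use the default clients but exclude ios
$ yt-dlp --extractor-args "youtube:player_client=default,-ios" "URL"
```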
@@ -1788,20 +1888,11 @@ #### generic
 * `key_query`: Passthrough the master m3u8 URL query to its HLS AES-128 decryption key URI if no value is provided, or else apply the query string given as `key_query=VALUE`. Note that this will have no effect if the key URI is provided via the `hls_key` extractor-arg. Does not apply to ffmpeg
 * `hls_key`: An HLS AES-128 key URI *or* key (as hex), and optionally the IV (as hex), in the form of `(URI|KEY)[,IV]`; e.g. `generic:hls_key=ABCDEF1234567980,0xFEDCBA0987654321`. Passing any of these values will force usage of the native HLS downloader and override the corresponding values found in the m3u8 playlist
 * `is_live`: Bypass live HLS detection and manually set `live_status` - a value of `false` will set `not_live`, any other value (or no value) will set `is_live`
+* `impersonate`: Target(s) to try and impersonate with the initial webpage request; e.g. `generic:impersonate=safari,chrome-110`. Use `generic:impersonate` to impersonate any available target, and use `generic:impersonate=false` to disable impersonation (default)
-#### funimation
-* `language`: Audio languages to extract, e.g. `funimation:language=english,japanese`
-* `version`: The video version to extract - `uncut` or `simulcast`
 
-#### crunchyrollbeta (Crunchyroll)
-* `hardsub`: One or more hardsub versions to extract (in order of preference), or `all` (default: `None` = no hardsubs will be extracted), e.g. `crunchyrollbeta:hardsub=en-US,de-DE`
 
 #### vikichannel
 * `video_types`: Types of videos to download - one or more of `episodes`, `movies`, `clips`, `trailers`
 
-#### niconico
-* `segment_duration`: Segment duration in milliseconds for HLS-DMC formats. Use it at your own risk since this feature **may result in your account termination.**
-
 #### youtubewebarchive
 * `check_all`: Try to check more at the cost of more requests. One or more of `thumbnails`, `captures`
 
@@ -1813,6 +1904,9 @@ #### hotstar
 * `vcodec`: vcodec to ignore - one or more of `h264`, `h265`, `dvh265`
 * `dr`: dynamic range to ignore - one or more of `sdr`, `hdr10`, `dv`
 
+#### instagram
+* `app_id`: The value of the `X-IG-App-ID` header used for API requests. Default is the web app ID, `936619743392459`
+
 #### niconicochannelplus
 * `max_comments`: Maximum number of comments to extract - default is `120`
 
@@ -1853,7 +1947,7 @@ #### afreecatvlive
 * `cdn`: One or more CDN IDs to use with the API call for stream URLs, e.g. `gcp_cdn`, `gs_cdn_pc_app`, `gs_cdn_mobile_web`, `gs_cdn_pc_web`
 
 #### soundcloud
-* `formats`: Formats to request from the API. Requested values should be in the format of `{protocol}_{extension}` (omitting the bitrate), e.g. `hls_opus,http_aac`. The `*` character functions as a wildcard, e.g. `*_mp3`, and can be passed by itself to request all formats. Known protocols include `http`, `hls` and `hls-aes`; known extensions include `aac`, `opus` and `mp3`. Original `download` formats are always extracted. Default is `http_aac,hls_aac,http_opus,hls_opus,http_mp3,hls_mp3`
+* `formats`: Formats to request from the API. Requested values should be in the format of `{protocol}_{codec}`, e.g. `hls_opus,http_aac`. The `*` character functions as a wildcard, e.g. `*_mp3`, and can be passed by itself to request all formats. Known protocols include `http`, `hls` and `hls-aes`; known codecs include `aac`, `opus` and `mp3`. Original `download` formats are always extracted. Default is `http_aac,hls_aac,http_opus,hls_opus,http_mp3,hls_mp3`
 
 #### orfon (orf:on)
 * `prefer_segments_playlist`: Prefer a playlist of program segments instead of a single complete video when available. If individual segments are desired, use `--concat-playlist never --extractor-args "orfon:prefer_segments_playlist"`
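The wildcard syntax from the reworded `formats` description above, as a command; a sketch, with `TRACK_URL` as a placeholder:

```
# Request only mp3 formats from the SoundCloud API
$ yt-dlp --extractor-args "soundcloud:formats=*_mp3" "TRACK_URL"
```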
@@ -1861,8 +1955,15 @@ #### orfon (orf:on)
|
|||||||
#### bilibili
|
#### bilibili
|
||||||
* `prefer_multi_flv`: Prefer extracting flv formats over mp4 for older videos that still provide legacy formats
|
* `prefer_multi_flv`: Prefer extracting flv formats over mp4 for older videos that still provide legacy formats
|
||||||
|
|
||||||
#### digitalconcerthall
|
#### sonylivseries
|
||||||
* `prefer_combined_hls`: Prefer extracting combined/pre-merged video and audio HLS formats. This will exclude 4K/HEVC video and lossless/FLAC audio formats, which are only available as split video/audio HLS formats
|
* `sort_order`: Episode sort order for series extraction - one of `asc` (ascending, oldest first) or `desc` (descending, newest first). Default is `asc`
|
||||||
|
|
||||||
|
#### tver
|
||||||
|
* `backend`: Backend API to use for extraction - one of `streaks` (default) or `brightcove` (deprecated)
|
||||||
|
|
||||||
|
#### vimeo
|
||||||
|
* `client`: Client to extract video data from. The currently available clients are `android`, `ios`, and `web`. Only one client can be used. The `web` client is used by default. The `web` client only works with account cookies or login credentials. The `android` and `ios` clients only work with previously cached OAuth tokens
|
||||||
|
* `original_format_policy`: Policy for when to try extracting original formats. One of `always`, `never`, or `auto`. The default `auto` policy tries to avoid exceeding the web client's API rate-limit by only making an extra request when Vimeo publicizes the video's downloadability
|
||||||
|
|
||||||
**Note**: These options may be changed/removed in the future without concern for backward compatibility

@@ -1890,6 +1991,7 @@ # PLUGINS

myplugin.py

yt-dlp looks for these `yt_dlp_plugins` namespace folders in many locations (see below) and loads in plugins from **all** of them.

Set the environment variable `YTDLP_NO_PLUGINS` to something nonempty to disable loading plugins entirely.

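For example, to rule plugins out when debugging a problem, a one-off invocation can disable them (the URL is a placeholder):

```shell
YTDLP_NO_PLUGINS=1 yt-dlp -v "https://example.com/watch?v=..."
```
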
See the [wiki for some known plugins](https://github.com/yt-dlp/yt-dlp/wiki/Plugins)

@@ -1917,7 +2019,7 @@ ## Installing Plugins

* Plugin packages can be installed and managed using `pip`. See [yt-dlp-sample-plugins](https://github.com/yt-dlp/yt-dlp-sample-plugins) for an example.
  * Note: plugin files between plugin packages installed with pip must have unique filenames.
* Any path in `PYTHONPATH` is searched for the `yt_dlp_plugins` namespace folder.
  * Note: This does not apply for Pyinstaller builds.

`.zip`, `.egg` and `.whl` archives containing a `yt_dlp_plugins` namespace folder in their root are also supported as plugin packages.

@@ -2146,12 +2248,11 @@ ### New features

* **[Format Sorting](#sorting-formats)**: The default format sorting options have been changed so that higher resolution and better codecs will now be preferred instead of simply higher bitrate. Furthermore, you can now specify the sort order using `-S`. This allows for much easier format selection than what is possible by simply using `--format` ([examples](#format-selection-examples))

* **Merged with animelover1984/youtube-dl**: You get most of the features and improvements from [animelover1984/youtube-dl](https://github.com/animelover1984/youtube-dl) including `--write-comments`, `BiliBiliSearch`, `BilibiliChannel`, Embedding thumbnail in mp4/ogg/opus, playlist infojson etc. See [#31](https://github.com/yt-dlp/yt-dlp/pull/31) for details.

* **YouTube improvements**:
    * Supports Clips, Stories (`ytstories:<channel UCID>`), Search (including filters)**\***, YouTube Music Search, Channel-specific search, Search prefixes (`ytsearch:`, `ytsearchdate:`)**\***, Mixes, and Feeds (`:ytfav`, `:ytwatchlater`, `:ytsubs`, `:ythistory`, `:ytrec`, `:ytnotif`)
    * Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\***
    * Download livestreams from the start using `--live-from-start` (*experimental*)
    * Channel URLs download all uploads of the channel, including shorts and live

@@ -2177,9 +2278,9 @@ ### New features

* **Output template improvements**: Output templates can now have date-time formatting, numeric offsets, object traversal etc. See [output template](#output-template) for details. Even more advanced operations can also be done with the help of `--parse-metadata` and `--replace-in-metadata`

* **Other new options**: Many new options have been added such as `--alias`, `--print`, `--concat-playlist`, `--wait-for-video`, `--retry-sleep`, `--sleep-requests`, `--convert-thumbnails`, `--force-download-archive`, `--force-overwrites`, `--break-match-filters` etc

* **Improvements**: Regex and other operators in `--format`/`--match-filters`, multiple `--postprocessor-args` and `--downloader-args`, faster archive checking, more [format selection options](#format-selection), merge multi-video/audio, multiple `--config-locations`, `--exec` at different stages, etc

* **Plugins**: Extractors and PostProcessors can be loaded from an external file. See [plugins](#plugins) for details

@@ -2195,12 +2296,12 @@ ### Differences in default behavior

Some of yt-dlp's default options are different from those of youtube-dl and youtube-dlc:

* yt-dlp supports only [Python 3.10+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
* The options `--auto-number` (`-A`), `--title` (`-t`) and `--literal` (`-l`) no longer work. See [removed options](#Removed) for details
* `avconv` is not supported as an alternative to `ffmpeg`
* yt-dlp stores config files in slightly different locations to youtube-dl. See [CONFIGURATION](#configuration) for a list of correct locations
* The default [output template](#output-template) is `%(title)s [%(id)s].%(ext)s`. There is no real reason for this change. This was changed before yt-dlp was ever made public and now there are no plans to change it back to `%(title)s-%(id)s.%(ext)s`. Instead, you may use `--compat-options filename`
* The default [format sorting](#sorting-formats) is different from youtube-dl and prefers higher resolution and better codecs rather than higher bitrates. You can use the `--format-sort` option to change this to any order you prefer, or use `--compat-options format-sort` to use youtube-dl's sorting order. Older versions of yt-dlp preferred VP9 due to its broader compatibility; you can use `--compat-options prefer-vp9-sort` to revert to that format sorting preference. These two compat options cannot be used together
* The default format selector is `bv*+ba/b`. This means that if a combined video + audio format that is better than the best video-only format is found, the former will be preferred. Use `-f bv+ba/b` or `--compat-options format-spec` to revert this
* Unlike youtube-dlc, yt-dlp does not allow merging multiple audio/video streams into one file by default (since this conflicts with the use of `-f bv*+ba`). If needed, this feature must be enabled using `--audio-multistreams` and `--video-multistreams`. You can also use `--compat-options multistreams` to enable both
* `--no-abort-on-error` is enabled by default. Use `--abort-on-error` or `--compat-options abort-on-error` to abort on errors instead
@@ -2212,7 +2313,7 @@ ### Differences in default behavior

* Live chats (if available) are considered as subtitles. Use `--sub-langs all,-live_chat` to download all subtitles except live chat. You can also use `--compat-options no-live-chat` to prevent any live chat/danmaku from downloading
* YouTube channel URLs download all uploads of the channel. To download only the videos in a specific tab, pass the tab's URL. If the channel does not show the requested tab, an error will be raised. Also, `/live` URLs raise an error if there are no live videos instead of silently downloading the entire channel. You may use `--compat-options no-youtube-channel-redirect` to revert all these redirections
* Unavailable videos are also listed for YouTube playlists. Use `--compat-options no-youtube-unavailable-videos` to remove this
* The upload dates extracted from YouTube are in UTC.
* If `ffmpeg` is used as the downloader, the downloading and merging of formats happen in a single step when possible. Use `--compat-options no-direct-merge` to revert this
* Thumbnail embedding in `mp4` is done with mutagen if possible. Use `--compat-options embed-thumbnail-atomicparsley` to force the use of AtomicParsley instead
* Some internal metadata such as filenames are removed by default from the infojson. Use `--no-clean-infojson` or `--compat-options no-clean-infojson` to revert this
@@ -2220,20 +2321,22 @@ ### Differences in default behavior

* `certifi` will be used for SSL root certificates, if installed. If you want to use system certificates (e.g. self-signed), use `--compat-options no-certifi`
* yt-dlp's sanitization of invalid characters in filenames is different/smarter than in youtube-dl. You can use `--compat-options filename-sanitization` to revert to youtube-dl's behavior
* ~~yt-dlp tries to parse the external downloader outputs into the standard progress output if possible (Currently implemented: [aria2c](https://github.com/yt-dlp/yt-dlp/issues/5931)). You can use `--compat-options no-external-downloader-progress` to get the downloader output as-is~~
* yt-dlp versions between 2021.09.01 and 2023.01.02 applied `--match-filters` to nested playlists. This was an unintentional side-effect of [8f18ac](https://github.com/yt-dlp/yt-dlp/commit/8f18aca8717bb0dd49054555af8d386e5eda3a88) and is fixed in [d7b460](https://github.com/yt-dlp/yt-dlp/commit/d7b460d0e5fc710950582baed2e3fc616ed98a80). Use `--compat-options playlist-match-filter` to revert this
* yt-dlp versions between 2021.11.10 and 2023.06.21 estimated `filesize_approx` values for fragmented/manifest formats. This was added for convenience in [f2fe69](https://github.com/yt-dlp/yt-dlp/commit/f2fe69c7b0d208bdb1f6292b4ae92bc1e1a7444a), but was reverted in [0dff8e](https://github.com/yt-dlp/yt-dlp/commit/0dff8e4d1e6e9fb938f4256ea9af7d81f42fd54f) due to the potentially extreme inaccuracy of the estimated values. Use `--compat-options manifest-filesize-approx` to keep extracting the estimated values
* yt-dlp uses modern http client backends such as `requests`. Use `--compat-options prefer-legacy-http-handler` to use the legacy http handler (`urllib`) for standard http requests.
* The sub-modules `swfinterp` and `casefold` are removed.
* Passing `--simulate` (or calling `extract_info` with `download=False`) no longer alters the default format selection. See [#9843](https://github.com/yt-dlp/yt-dlp/issues/9843) for details.
* yt-dlp no longer applies the server modified time to downloaded files by default. Use `--mtime` or `--compat-options mtime-by-default` to revert this.

For ease of use, a few more compat options are available:

* `--compat-options all`: Use all compat options (**Do NOT use this!**)
* `--compat-options youtube-dl`: Same as `--compat-options all,-multistreams,-playlist-match-filter,-manifest-filesize-approx,-allow-unsafe-ext,-prefer-vp9-sort`
* `--compat-options youtube-dlc`: Same as `--compat-options all,-no-live-chat,-no-youtube-channel-redirect,-playlist-match-filter,-manifest-filesize-approx,-allow-unsafe-ext,-prefer-vp9-sort`
* `--compat-options 2021`: Same as `--compat-options 2022,no-certifi,filename-sanitization`
* `--compat-options 2022`: Same as `--compat-options 2023,playlist-match-filter,no-external-downloader-progress,prefer-legacy-http-handler,manifest-filesize-approx`
* `--compat-options 2023`: Same as `--compat-options 2024,prefer-vp9-sort`
* `--compat-options 2024`: Same as `--compat-options mtime-by-default`. Use this to enable all future compat options

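As the `all,-multistreams,...` expansions above show, compat options are comma-separated and a leading `-` negates an individual option. A hypothetical mix that adopts 2022-era behavior while keeping the modern http handler should therefore look like this (URL is a placeholder):

```shell
yt-dlp --compat-options 2022,-prefer-legacy-http-handler "https://example.com/video"
```
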
The following compat options restore vulnerable behavior from before security patches:

@@ -2266,13 +2369,13 @@ #### Redundant options

    --get-thumbnail                  --print thumbnail
    -e, --get-title                  --print title
    -g, --get-url                    --print urls
    --match-title REGEX              --match-filters "title ~= (?i)REGEX"
    --reject-title REGEX             --match-filters "title !~= (?i)REGEX"
    --min-views COUNT                --match-filters "view_count >=? COUNT"
    --max-views COUNT                --match-filters "view_count <=? COUNT"
    --break-on-reject                Use --break-match-filters
    --user-agent UA                  --add-headers "User-Agent:UA"
    --referer URL                    --add-headers "Referer:URL"
    --playlist-start NUMBER          -I NUMBER:
    --playlist-end NUMBER            -I :NUMBER
    --playlist-reverse               -I ::-1

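The `--match-filters` replacements above compose into a single expression, with `&` joining conditions. For instance, the old `--min-views`/`--reject-title` pair maps onto something like the following (URL illustrative):

```shell
yt-dlp --match-filters "view_count >=? 1000 & title !~= (?i)trailer" "https://example.com/playlist"
```
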
@@ -2295,11 +2398,7 @@ #### Not recommended

    --hls-prefer-native              --downloader "m3u8:native"
    --hls-prefer-ffmpeg              --downloader "m3u8:ffmpeg"
    --list-formats-old               --compat-options list-formats (Alias: --no-list-formats-as-table)
    --list-formats-as-table          --compat-options -list-formats [Default]
    --geo-bypass                     --xff "default"
    --no-geo-bypass                  --xff "never"
    --geo-bypass-country CODE        --xff CODE

@@ -2310,20 +2409,14 @@ #### Developer options

    --test                           Download only part of video for testing extractors
    --load-pages                     Load pages dumped by --write-pages
    --allow-unplayable-formats       List unplayable formats also
    --no-allow-unplayable-formats    Default

#### Old aliases

These are aliases that are no longer documented for various reasons

    --clean-infojson                 --clean-info-json
    --force-write-download-archive   --force-write-archive
    --no-clean-infojson              --no-clean-info-json
    --no-split-tracks                --no-split-chapters
    --no-write-srt                   --no-write-subs
@@ -2336,7 +2429,7 @@ #### Old aliases

    --yes-overwrites                 --force-overwrites

#### Sponskrub Options

Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been removed in favor of the `--sponsorblock` options

    --sponskrub                      --sponsorblock-mark all
    --no-sponskrub                   --no-sponsorblock

@@ -2358,6 +2451,17 @@ #### No longer supported

    --no-include-ads                 Default
    --write-annotations              No supported site has annotations now
    --no-write-annotations           Default
    --avconv-location                Removed alias for --ffmpeg-location
    --cn-verification-proxy URL      Removed alias for --geo-verification-proxy URL
    --dump-headers                   Removed alias for --print-traffic
    --dump-intermediate-pages        Removed alias for --dump-pages
    --youtube-skip-dash-manifest     Removed alias for --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
    --youtube-skip-hls-manifest      Removed alias for --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
    --youtube-include-dash-manifest  Default (Alias: --no-youtube-skip-dash-manifest)
    --youtube-include-hls-manifest   Default (Alias: --no-youtube-skip-hls-manifest)
    --youtube-print-sig-code         Removed testing functionality
    --dump-user-agent                No longer supported
    --xattr-set-filesize             No longer supported
    --compat-options seperate-video-versions    No longer needed
    --compat-options no-youtube-prefer-utc-upload-date    No longer supported

THIRD_PARTY_LICENSES.txt (new file, 4473 lines)
File diff suppressed because it is too large

@@ -1,10 +1,178 @@

services:
  linux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  linux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  linux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_armv7l:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm/v7"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp
      - ../../venv:/yt-dlp-build-venv

  linux_armv7l_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm/v7"
      args:
        VERIFYIMAGE: arm32v7/debian:bullseye
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  musllinux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
      EXCLUDE_CURL_CFFI: "1"
    volumes:
      - ../..:/yt-dlp

  musllinux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

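A plausible local invocation of one of these build services, assuming the compose file lives at `bundle/docker/compose.yml` as the neighboring `bundle/docker/linux/` files suggest; the environment values below are illustrative, and CI wires them up differently:

```shell
cd bundle/docker
EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp VERSION=2025.01.01 \
    docker compose run --build linux_x86_64
```
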
bundle/docker/linux/Dockerfile (new file, 16 lines)
@@ -0,0 +1,16 @@

ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
ARG VERIFYIMAGE=alpine:3.22


FROM $BUILDIMAGE AS build

WORKDIR /yt-dlp
COPY build.sh /build.sh
ENTRYPOINT ["/build.sh"]


FROM $VERIFYIMAGE AS verify

WORKDIR /testing
COPY verify.sh /verify.sh
ENTRYPOINT ["/verify.sh"]

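Each compose service selects one of these two stages via `target:`; outside of compose, roughly equivalent direct builds would be (the tags are made up for illustration):

```shell
docker build bundle/docker/linux --target build -t yt-dlp-linux-build
docker build bundle/docker/linux --target verify -t yt-dlp-linux-verify
```
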
bundle/docker/linux/build.sh (new executable file, 48 lines)
@@ -0,0 +1,48 @@

#!/bin/bash
set -exuo pipefail

if [[ -z "${PYTHON_VERSION:-}" ]]; then
    PYTHON_VERSION="3.13"
    echo "Defaulting to using Python ${PYTHON_VERSION}"
fi

function runpy {
    "/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
}

function venvpy {
    "python${PYTHON_VERSION}" "$@"
}

INCLUDES=(
    --include-group pyinstaller
    --include-group secretstorage
)

if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
    INCLUDES+=(--include-group curl-cffi)
fi

runpy -m venv /yt-dlp-build-venv
# shellcheck disable=SC1091
source /yt-dlp-build-venv/bin/activate
# Inside the venv we use venvpy instead of runpy
venvpy -m ensurepip --upgrade --default-pip
venvpy -m devscripts.install_deps --only-optional-groups --include-group build
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
venvpy -m devscripts.make_lazy_extractors
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"

if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
    mkdir -p /build
    venvpy -m bundle.pyinstaller --onedir --distpath=/build
    pushd "/build/${EXE_NAME}"
    chmod +x "${EXE_NAME}"
    venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
    popd
fi

if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
    venvpy -m bundle.pyinstaller
    chmod +x "./dist/${EXE_NAME}"
fi

bundle/docker/linux/verify.sh (new executable file, 51 lines)
@@ -0,0 +1,51 @@

#!/bin/sh
set -eu

if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
    if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
        echo "All executable builds were skipped"
        exit 1
    fi
    echo "Extracting zip to verify onedir build"
    if command -v python3 >/dev/null 2>&1; then
        python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
    else
        echo "Attempting to install unzip"
        if command -v dnf >/dev/null 2>&1; then
            dnf -y install --allowerasing unzip
        elif command -v yum >/dev/null 2>&1; then
            yum -y install unzip
        elif command -v apt-get >/dev/null 2>&1; then
            DEBIAN_FRONTEND=noninteractive apt-get update -qq
            DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
        elif command -v apk >/dev/null 2>&1; then
            apk add --no-cache unzip
        else
            echo "Unsupported image"
            exit 1
        fi
        unzip "/build/${EXE_NAME}.zip" -d ./
    fi
    chmod +x "./${EXE_NAME}"
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

echo "Verifying onefile build"
cp "/build/${EXE_NAME}" ./
chmod +x "./${EXE_NAME}"

if [ -z "${UPDATE_TO:-}" ]; then
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
    exit 1
fi

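To exercise the update/downgrade path of this script against an existing artifact, an invocation along these lines should work; the `UPDATE_TO` target format and value are illustrative assumptions:

```shell
cd bundle/docker
EXE_NAME=yt-dlp_linux UPDATE_TO=yt-dlp/yt-dlp@2024.08.06 \
    docker compose run --build linux_x86_64_verify
```
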
@@ -1,21 +0,0 @@

FROM alpine:3.19 as base

RUN apk --update add --no-cache \
    build-base \
    python3 \
    pipx \
    ;

RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
    scons \
    patchelf \
    binutils \
    ;
RUN pipx install staticx

WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT /entrypoint.sh

@@ -1,13 +0,0 @@

#!/bin/ash
set -e

source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps --include secretstorage --include curl-cffi
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate

source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate

@@ -1,59 +0,0 @@

#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import warnings

from py2exe import freeze

from devscripts.utils import read_version

VERSION = read_version()


def main():
    warnings.warn(
        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
        'It is recommended to run "pyinst.py" to build using pyinstaller instead')

    freeze(
        console=[{
            'script': './yt_dlp/__main__.py',
            'dest_base': 'yt-dlp',
            'icon_resources': [(1, 'devscripts/logo.ico')],
        }],
        version_info={
            'version': VERSION,
            'description': 'A feature-rich command-line audio/video downloader',
            'comments': 'Official repository: <https://github.com/yt-dlp/yt-dlp>',
            'product_name': 'yt-dlp',
            'product_version': VERSION,
        },
        options={
            'bundle_files': 0,
            'compressed': 1,
            'optimize': 2,
            'dist_dir': './dist',
            'excludes': [
                # py2exe cannot import Crypto
                'Crypto',
                'Cryptodome',
                # requests >=2.32.0 breaks py2exe builds due to certifi dependency
                'requests',
                'urllib3',
            ],
            'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
            # Modules that are only imported dynamically must be added here
            'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
                         'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
        },
        zipfile=None,
    )


if __name__ == '__main__':
    main()

@@ -13,6 +13,8 @@

from devscripts.utils import read_version

OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
    OS_NAME = 'musllinux'
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
    MACHINE = 'x86' if ARCH == '32' else ''

@@ -36,6 +38,9 @@ def main():

        f'--name={name}',
        '--icon=devscripts/logo.ico',
        '--upx-exclude=vcruntime140.dll',
        # Ref: https://github.com/yt-dlp/yt-dlp/issues/13311
        # https://github.com/pyinstaller/pyinstaller/issues/9149
        '--exclude-module=pkg_resources',
        '--noconfirm',
        '--additional-hooks-dir=yt_dlp/__pyinstaller',
        *opts,

@@ -59,16 +64,22 @@ def parse_options():

def exe(onedir):
    """@returns (name, path)"""
    platform_name, machine, extension = {
        'win32': (None, MACHINE, '.exe'),
        'darwin': ('macos', None, None),
    }.get(OS_NAME, (OS_NAME, MACHINE, None))

    name = '_'.join(filter(None, (
        'yt-dlp',
        platform_name,
        machine,
    )))

    return name, ''.join(filter(None, (
        'dist/',
        onedir and f'{name}/',
        name,
        extension,
    )))

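Reading the mapping above, the onefile artifact paths it implies for a few representative hosts would be roughly as follows (derived from the code shown here, not an exhaustive or authoritative list):

```shell
# glibc x86_64 Linux   -> dist/yt-dlp_linux
# musl-libc x86_64     -> dist/yt-dlp_musllinux
# glibc aarch64 Linux  -> dist/yt-dlp_linux_aarch64
# 64-bit Windows       -> dist/yt-dlp.exe
# macOS                -> dist/yt-dlp_macos
```
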
@@ -118,7 +129,6 @@ def windows_set_version(exe, version):

        StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
        StringStruct('FileVersion', version),
        StringStruct('InternalName', f'yt-dlp{suffix}'),
        StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
        StringStruct('ProductName', f'yt-dlp{suffix}'),
        StringStruct(

@@ -6,13 +6,17 @@ __yt_dlp()

prev="${COMP_WORDS[COMP_CWORD-1]}"
|
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||||
opts="{{flags}}"
|
opts="{{flags}}"
|
||||||
keywords=":ytfavorites :ytrecommended :ytsubscriptions :ytwatchlater :ythistory"
|
keywords=":ytfavorites :ytrecommended :ytsubscriptions :ytwatchlater :ythistory"
|
||||||
fileopts="-a|--batch-file|--download-archive|--cookies|--load-info"
|
fileopts="-a|--batch-file|--download-archive|--cookies|--load-info-json"
|
||||||
diropts="--cache-dir"
|
diropts="--cache-dir"
|
||||||
|
|
||||||
if [[ ${prev} =~ ${fileopts} ]]; then
|
if [[ ${prev} =~ ${fileopts} ]]; then
|
||||||
|
local IFS=$'\n'
|
||||||
|
type compopt &>/dev/null && compopt -o filenames
|
||||||
COMPREPLY=( $(compgen -f -- ${cur}) )
|
COMPREPLY=( $(compgen -f -- ${cur}) )
|
||||||
return 0
|
return 0
|
||||||
elif [[ ${prev} =~ ${diropts} ]]; then
|
elif [[ ${prev} =~ ${diropts} ]]; then
|
||||||
|
local IFS=$'\n'
|
||||||
|
type compopt &>/dev/null && compopt -o dirnames
|
||||||
COMPREPLY=( $(compgen -d -- ${cur}) )
|
COMPREPLY=( $(compgen -d -- ${cur}) )
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -185,5 +185,139 @@

"action": "add",
|
"action": "add",
|
||||||
"when": "6075a029dba70a89675ae1250e7cdfd91f0eba41",
|
"when": "6075a029dba70a89675ae1250e7cdfd91f0eba41",
|
||||||
"short": "[priority] Security: [[ie/douyutv] Do not use dangerous javascript source/URL](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-3v33-3wmw-3785)\n - A dependency on potentially malicious third-party JavaScript code has been removed from the Douyu extractors"
|
"short": "[priority] Security: [[ie/douyutv] Do not use dangerous javascript source/URL](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-3v33-3wmw-3785)\n - A dependency on potentially malicious third-party JavaScript code has been removed from the Douyu extractors"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "fb8b7f226d251e521a89b23c415e249e5b788e5c",
|
||||||
|
"short": "[priority] **The minimum *recommended* Python version has been raised to 3.9**\nSince Python 3.8 will reach end-of-life in October 2024, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "b31b81d85f00601710d4fac590c3e4efb4133283",
|
||||||
|
"short": "[ci] Rerun failed tests (#11143)",
|
||||||
|
"authors": ["Grub4K"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "a886cf3e900f4a2ec00af705f883539269545609",
|
||||||
|
"short": "[priority] **py2exe is no longer supported**\nThis release's `yt-dlp_min.exe` will be the last, and it's actually a PyInstaller-bundled executable so that yt-dlp users updating their py2exe build with `-U` will be automatically migrated. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10087)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "a886cf3e900f4a2ec00af705f883539269545609",
|
||||||
|
"short": "[priority] **Following this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "87884f15580910e4e0fe0e1db73508debc657471",
|
||||||
|
"short": "[priority] **Beginning with this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "d784464399b600ba9516bbcec6286f11d68974dd",
|
||||||
|
"short": "[priority] **The minimum *required* Python version has been raised to 3.9**\nPython 3.8 reached its end-of-life on 2024.10.07, and yt-dlp has now removed support for it. As an unfortunate side effect, the official `yt-dlp.exe` and `yt-dlp_x86.exe` binaries are no longer supported on Windows 7. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "914af9a0cf51c9a3f74aa88d952bee8334c67511",
|
||||||
|
"short": "Expand paths in `--plugin-dirs` (#11334)",
|
||||||
|
"authors": ["bashonly"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "c29f5a7fae93a08f3cfbb6127b2faa75145b06a0",
|
||||||
|
"short": "[ie/generic] Do not impersonate by default (#11336)",
|
||||||
|
"authors": ["bashonly"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "57212a5f97ce367590aaa5c3e9a135eead8f81f7",
|
||||||
|
"short": "[ie/vimeo] Fix API retries (#11351)",
|
||||||
|
"authors": ["bashonly"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "52c0ffe40ad6e8404d93296f575007b05b04c686",
|
||||||
|
"short": "[priority] **Login with OAuth is no longer supported for YouTube**\nDue to a change made by the site, yt-dlp is no longer able to support OAuth login for YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/11462#issuecomment-2471703090)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "76ac023ff02f06e8c003d104f02a03deeddebdcd",
|
||||||
|
"short": "[ie/youtube:tab] Improve shorts title extraction (#11997)",
|
||||||
|
"authors": ["bashonly", "d3d9"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "88eb1e7a9a2720ac89d653c0d0e40292388823bb",
|
||||||
|
"short": "[priority] **New option `--preset-alias`/`-t` has been added**\nThis provides convenient predefined aliases for common use cases. Available presets include `mp4`, `mp3`, `mkv`, `aac`, and `sleep`. See [the README](https://github.com/yt-dlp/yt-dlp/blob/master/README.md#preset-aliases) for more details."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "d596824c2f8428362c072518856065070616e348"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "7b81634fb1d15999757e7a9883daa6ef09ea785b"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "500761e41acb96953a5064e951d41d190c287e46"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "f3008bc5f89d2691f2f8dfc51b406ef4e25281c3",
|
||||||
|
"short": "[priority] **Default behaviour changed from `--mtime` to `--no-mtime`**\nyt-dlp no longer applies the server modified time to downloaded files by default. [Read more](https://github.com/yt-dlp/yt-dlp/issues/12780)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "959ac99e98c3215437e573c22d64be42d361e863",
|
||||||
|
"short": "[priority] Security: [[CVE-2025-54072](https://nvd.nist.gov/vuln/detail/CVE-2025-54072)] [Fix `--exec` placeholder expansion on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-45hg-7f49-5h56)\n - When `--exec` is used on Windows, the filepath expanded from `{}` (or the default placeholder) is now properly escaped"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "b831406a1d3be34c159835079d12bae624c43610",
|
||||||
|
"short": "[ie/rtve.es:program] Add extractor (#12955)",
|
||||||
|
"authors": ["meGAmeS1", "seproDev"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "23c658b9cbe34a151f8f921ab1320bb5d4e40a4d",
|
||||||
|
"short": "[priority] **The minimum *recommended* Python version has been raised to 3.10**\nSince Python 3.9 will reach end-of-life in October 2025, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "cc5a5caac5fbc0d605b52bde0778d6fd5f97b5ab",
|
||||||
|
"short": "[priority] **darwin_legacy_exe builds are being discontinued**\nThis release's `yt-dlp_macos_legacy` binary will likely be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13856)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "c76ce28e06c816eb5b261dfb6aff6e69dd9b7382",
|
||||||
|
"short": "[priority] **linux_armv7l_exe builds are being discontinued**\nThis release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "08d78996831bd8e1e3c2592d740c3def00bbf548",
|
||||||
|
"short": "[priority] **Several options have been deprecated**\nIn order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "4e6a693057cfaf1ce1f07b019ed3bfce2bf936f6",
|
||||||
|
"short": "[priority] **The minimum *required* Python version has been raised to 3.10**\nPython 3.9 has reached its end-of-life as of October 2025, and yt-dlp has now removed support for it. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "2c9091e355a7ba5d1edb69796ecdca48199b77fb",
|
||||||
|
"short": "[priority] **A stopgap release with a *TEMPORARY partial* fix for YouTube support**\nSome formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "8636a9bac3bed99984c1e297453660468ecf504b",
|
||||||
|
"short": "Fix 6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc",
|
||||||
|
"authors": ["Grub4K"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "6224a3898821965a7d6a2cb9cc2de40a0fd6e6bc",
|
||||||
|
"short": "[priority] **An external JavaScript runtime is now required for full YouTube support**\nyt-dlp now requires users to have an external JavaScript runtime (e.g. Deno) installed in order to solve the JavaScript challenges presented by YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/15012)"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
@@ -20,6 +20,7 @@ def parse_patched_options(opts):

        'fragment_retries': 0,
        'extract_flat': False,
        'concat_playlist': 'never',
        'update_self': False,
    })
    yt_dlp.options.create_parser = lambda: patched_parser
    try:

@@ -11,13 +11,12 @@

import subprocess

from yt_dlp.aes import aes_encrypt, key_expansion

secret_msg = b'Secret message goes here'


def hex_str(int_list):
    return codecs.encode(bytes(int_list), 'hex')


def openssl_encode(algo, key, iv):

devscripts/generate_third_party_licenses.py (new file, 329 lines)
@@ -0,0 +1,329 @@

import requests
from dataclasses import dataclass
from pathlib import Path
import hashlib

DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
CACHE_LOCATION = '.license_cache'
HEADER = '''THIRD-PARTY LICENSES

This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
Source code for bundled third-party components is available from the original projects.
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''


@dataclass(frozen=True)
class Dependency:
    name: str
    license_url: str
    project_url: str = ''
    license: str = ''
    comment: str = ''


DEPENDENCIES: list[Dependency] = [
    # Core runtime environment components
    Dependency(
        name='Python',
        license='PSF-2.0',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
        project_url='https://www.python.org/',
    ),
    Dependency(
        name='Microsoft Distributable Code',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
        comment='Only included in Windows builds',
    ),
    Dependency(
        name='bzip2',
        license='bzip2-1.0.6',
        license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
        project_url='https://sourceware.org/bzip2/',
    ),
    Dependency(
        name='libffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
        project_url='https://sourceware.org/libffi/',
    ),
    Dependency(
        name='OpenSSL 3.0+',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
        project_url='https://www.openssl.org/',
    ),
    Dependency(
        name='SQLite',
        license='Public Domain',  # Technically does not need to be included
        license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
        project_url='https://www.sqlite.org/',
    ),
    Dependency(
        name='liblzma',
        license='0BSD',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
        project_url='https://tukaani.org/xz/',
    ),
    Dependency(
        name='mpdecimal',
        license='BSD-2-Clause',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
        project_url='https://www.bytereef.org/mpdecimal/',
    ),
    Dependency(
        name='zlib',
        license='zlib',
        license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
        project_url='https://zlib.net/',
    ),
    Dependency(
        name='Expat',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
        project_url='https://libexpat.github.io/',
    ),
    Dependency(
        name='ncurses',
        license='X11-distribute-modifications-variant',
        license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
        comment='Only included in Linux/macOS builds',
        project_url='https://invisible-island.net/ncurses/',
    ),
    Dependency(
        name='GNU Readline',
        license='GPL-3.0-or-later',
        license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
        comment='Only included in Linux builds',
        project_url='https://www.gnu.org/software/readline/',
    ),
    Dependency(
        name='libstdc++',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
    ),
    Dependency(
        name='libgcc',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/',
    ),
    Dependency(
        name='libuuid',
        license='BSD-3-Clause',
        license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
        comment='Only included in Linux builds',
        project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
    ),
    Dependency(
        name='libintl',
        license='LGPL-2.1-or-later',
        license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
|
license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/gettext/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libidn2',
|
||||||
|
license='LGPL-3.0-or-later',
|
||||||
|
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/libidn/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libidn2 (Unicode character data files)',
|
||||||
|
license='Unicode-TOU AND Unicode-DFS-2016',
|
||||||
|
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/libidn/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='libunistring',
|
||||||
|
license='LGPL-3.0-or-later',
|
||||||
|
license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://www.gnu.org/software/libunistring/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='librtmp',
|
||||||
|
license='LGPL-2.1-or-later',
|
||||||
|
# No official repo URL
|
||||||
|
license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://rtmpdump.mplayerhq.hu/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='zstd',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
|
||||||
|
comment='Only included in macOS builds',
|
||||||
|
project_url='https://facebook.github.io/zstd/',
|
||||||
|
),
|
||||||
|
|
||||||
|
# Python packages
|
||||||
|
Dependency(
|
||||||
|
name='brotli',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
|
||||||
|
project_url='https://brotli.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='curl_cffi',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
|
||||||
|
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||||
|
project_url='https://curl-cffi.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of curl_cffi
|
||||||
|
Dependency(
|
||||||
|
name='curl-impersonate',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
|
||||||
|
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||||
|
project_url='https://github.com/lexiforest/curl-impersonate',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='cffi',
|
||||||
|
license='MIT-0', # Technically does not need to be included
|
||||||
|
license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://cffi.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependecy of cffi
|
||||||
|
Dependency(
|
||||||
|
name='pycparser',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://github.com/eliben/pycparser',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='mutagen',
|
||||||
|
license='GPL-2.0-or-later',
|
||||||
|
license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
|
||||||
|
project_url='https://mutagen.readthedocs.io/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='PyCryptodome',
|
||||||
|
license='Public Domain and BSD-2-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
|
||||||
|
project_url='https://www.pycryptodome.org/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='certifi',
|
||||||
|
license='MPL-2.0',
|
||||||
|
license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
|
||||||
|
project_url='https://github.com/certifi/python-certifi',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='requests',
|
||||||
|
license='Apache-2.0',
|
||||||
|
license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://requests.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of requests
|
||||||
|
Dependency(
|
||||||
|
name='charset-normalizer',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
|
||||||
|
project_url='https://charset-normalizer.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of requests
|
||||||
|
Dependency(
|
||||||
|
name='idna',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
|
||||||
|
project_url='https://github.com/kjd/idna',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='urllib3',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
|
||||||
|
project_url='https://urllib3.readthedocs.io/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='SecretStorage',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://secretstorage.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependency of SecretStorage
|
||||||
|
Dependency(
|
||||||
|
name='cryptography',
|
||||||
|
license='Apache-2.0', # Also available as BSD-3-Clause
|
||||||
|
license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://cryptography.io/',
|
||||||
|
),
|
||||||
|
# Dependency of SecretStorage
|
||||||
|
Dependency(
|
||||||
|
name='Jeepney',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
|
||||||
|
comment='Only included in Linux builds',
|
||||||
|
project_url='https://jeepney.readthedocs.io/',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='websockets',
|
||||||
|
license='BSD-3-Clause',
|
||||||
|
license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://websockets.readthedocs.io/',
|
||||||
|
),
|
||||||
|
# Dependencies of yt-dlp-ejs
|
||||||
|
Dependency(
|
||||||
|
name='Meriyah',
|
||||||
|
license='ISC',
|
||||||
|
license_url='https://raw.githubusercontent.com/meriyah/meriyah/refs/heads/main/LICENSE.md',
|
||||||
|
project_url='https://github.com/meriyah/meriyah',
|
||||||
|
),
|
||||||
|
Dependency(
|
||||||
|
name='Astring',
|
||||||
|
license='MIT',
|
||||||
|
license_url='https://raw.githubusercontent.com/davidbonnet/astring/refs/heads/main/LICENSE',
|
||||||
|
project_url='https://github.com/davidbonnet/astring/',
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_text(dep: Dependency) -> str:
|
||||||
|
cache_dir = Path(CACHE_LOCATION)
|
||||||
|
cache_dir.mkdir(exist_ok=True)
|
||||||
|
url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
|
||||||
|
cache_file = cache_dir / f'{url_hash}.txt'
|
||||||
|
|
||||||
|
if cache_file.exists():
|
||||||
|
return cache_file.read_text()
|
||||||
|
|
||||||
|
# UA needed since some domains block requests default UA
|
||||||
|
req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
|
||||||
|
req.raise_for_status()
|
||||||
|
text = req.text
|
||||||
|
cache_file.write_text(text)
|
||||||
|
return text
|
||||||
|
|
||||||
|
|
||||||
|
def build_output() -> str:
|
||||||
|
lines = [HEADER]
|
||||||
|
for d in DEPENDENCIES:
|
||||||
|
lines.append('\n')
|
||||||
|
lines.append('-' * 80)
|
||||||
|
header = f'{d.name}'
|
||||||
|
if d.license:
|
||||||
|
header += f' | {d.license}'
|
||||||
|
if d.comment:
|
||||||
|
header += f'\nNote: {d.comment}'
|
||||||
|
if d.project_url:
|
||||||
|
header += f'\nURL: {d.project_url}'
|
||||||
|
lines.append(header)
|
||||||
|
lines.append('-' * 80)
|
||||||
|
|
||||||
|
text = fetch_text(d)
|
||||||
|
lines.append(text.strip('\n') + '\n')
|
||||||
|
return '\n'.join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
content = build_output()
|
||||||
|
Path(DEFAULT_OUTPUT).write_text(content)
|
||||||
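A hedged usage sketch for the new script, assuming it is invoked from the repository root so the devscripts package is importable; repeated runs reuse the .license_cache directory instead of re-fetching each license text:

    from pathlib import Path

    from devscripts.generate_third_party_licenses import DEFAULT_OUTPUT, build_output

    # Equivalent to running the script directly: fetch (or reuse cached)
    # license texts and write the aggregate THIRD_PARTY_LICENSES.txt file.
    Path(DEFAULT_OUTPUT).write_text(build_output())
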
@@ -22,14 +22,19 @@ def parse_args():
         'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml',
         help='input file (default: %(default)s)')
     parser.add_argument(
-        '-e', '--exclude', metavar='DEPENDENCY', action='append',
-        help='exclude a dependency')
+        '-e', '--exclude-dependency', metavar='DEPENDENCY', action='append',
+        help='exclude a dependency (can be used multiple times)')
     parser.add_argument(
-        '-i', '--include', metavar='GROUP', action='append',
-        help='include an optional dependency group')
+        '-i', '--include-group', metavar='GROUP', action='append',
+        help='include an optional dependency group (can be used multiple times)')
     parser.add_argument(
-        '-o', '--only-optional', action='store_true',
-        help='only install optional dependencies')
+        '-c', '--cherry-pick', metavar='DEPENDENCY', action='append',
+        help=(
+            'only include a specific dependency from the resulting dependency list '
+            '(can be used multiple times)'))
+    parser.add_argument(
+        '-o', '--only-optional-groups', action='store_true',
+        help='omit default dependencies unless the "default" group is specified with --include-group')
     parser.add_argument(
         '-p', '--print', action='store_true',
         help='only print requirements to stdout')
@@ -39,30 +44,41 @@ def parse_args():
     return parser.parse_args()


+def uniq(arg) -> dict[str, None]:
+    return dict.fromkeys(map(str.lower, arg or ()))
+
+
 def main():
     args = parse_args()
     project_table = parse_toml(read_file(args.input))['project']
     recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<group_name>[\w-]+)\]')
     optional_groups = project_table['optional-dependencies']
-    excludes = args.exclude or []
+    excludes = uniq(args.exclude_dependency)
+    only_includes = uniq(args.cherry_pick)
+    include_groups = uniq(args.include_group)

     def yield_deps(group):
         for dep in group:
             if mobj := recursive_pattern.fullmatch(dep):
-                yield from optional_groups.get(mobj.group('group_name'), [])
+                yield from optional_groups.get(mobj.group('group_name'), ())
             else:
                 yield dep

-    targets = []
-    if not args.only_optional:  # `-o` should exclude 'dependencies' and the 'default' group
-        targets.extend(project_table['dependencies'])
-        if 'default' not in excludes:  # `--exclude default` should exclude entire 'default' group
-            targets.extend(yield_deps(optional_groups['default']))
+    targets = {}
+    if not args.only_optional_groups:
+        # legacy: 'dependencies' is empty now
+        targets.update(dict.fromkeys(project_table['dependencies']))
+        targets.update(dict.fromkeys(yield_deps(optional_groups['default'])))

-    for include in filter(None, map(optional_groups.get, args.include or [])):
-        targets.extend(yield_deps(include))
+    for include in filter(None, map(optional_groups.get, include_groups)):
+        targets.update(dict.fromkeys(yield_deps(include)))

-    targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes]
+    def target_filter(target):
+        name = re.match(r'[\w-]+', target).group(0).lower()
+        return name not in excludes and (not only_includes or name in only_includes)
+
+    targets = list(filter(target_filter, targets))

     if args.print:
         for target in targets:
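Hedged examples of the renamed install_deps interface, driven through subprocess from the repository root (the group and dependency names passed here are illustrative):

    import subprocess
    import sys

    # Print the resolved default requirements without installing anything.
    subprocess.run([sys.executable, 'devscripts/install_deps.py', '--print'], check=True)

    # Cherry-pick a single dependency out of an included optional group.
    subprocess.run([
        sys.executable, 'devscripts/install_deps.py', '--print',
        '--include-group', 'test', '--cherry-pick', 'pytest',
    ], check=True)
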
@@ -71,14 +71,13 @@ def group_lookup(cls):
     def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
         group, _, subgroup = (group.strip().lower() for group in value.partition('/'))

-        result = cls.group_lookup().get(group)
-        if not result:
-            if subgroup:
-                return None, value
-            subgroup = group
-            result = cls.subgroup_lookup().get(subgroup)
+        if result := cls.group_lookup().get(group):
+            return result, subgroup or None

-        return result, subgroup or None
+        if subgroup:
+            return None, value
+
+        return cls.subgroup_lookup().get(group), group or None


 @dataclass
@@ -136,8 +135,7 @@ def _format_groups(self, groups):
                 first = False
                 yield '\n<details><summary><h3>Changelog</h3></summary>\n'

-            group = groups[item]
-            if group:
+            if group := groups[item]:
                 yield self.format_module(item.value, group)

         if self._collapsible:
@@ -253,7 +251,7 @@ class CommitRange:
         ''', re.VERBOSE | re.DOTALL)
     EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
     REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
-    FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert|Improve)\s+([\da-f]{40})')
+    FIXES_RE = re.compile(r'(?i:(?:bug\s*)?fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Improve)\s+([\da-f]{40})')
     UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')

     def __init__(self, start, end, default_author=None):
@@ -287,11 +285,16 @@ def _get_commits_and_fixes(self, default_author):
             short = next(lines)
             skip = short.startswith('Release ') or short == '[version] update'

+            fix_commitish = None
+            if match := self.FIXES_RE.search(short):
+                fix_commitish = match.group(1)
+
             authors = [default_author] if default_author else []
             for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
-                match = self.AUTHOR_INDICATOR_RE.match(line)
-                if match:
+                if match := self.AUTHOR_INDICATOR_RE.match(line):
                     authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)
+                if not fix_commitish and (match := self.FIXES_RE.fullmatch(line)):
+                    fix_commitish = match.group(1)

             commit = Commit(commit_hash, short, authors)
             if skip and (self._start or not i):
@@ -301,21 +304,17 @@ def _get_commits_and_fixes(self, default_author):
                 logger.debug(f'Reached Release commit, breaking: {commit}')
                 break

-            revert_match = self.REVERT_RE.fullmatch(commit.short)
-            if revert_match:
-                reverts[revert_match.group(1)] = commit
+            if match := self.REVERT_RE.fullmatch(commit.short):
+                reverts[match.group(1)] = commit
                 continue

-            fix_match = self.FIXES_RE.search(commit.short)
-            if fix_match:
-                commitish = fix_match.group(1)
-                fixes[commitish].append(commit)
+            if fix_commitish:
+                fixes[fix_commitish].append(commit)

             commits[commit.hash] = commit

         for commitish, revert_commit in reverts.items():
-            reverted = commits.pop(commitish, None)
-            if reverted:
+            if reverted := commits.pop(commitish, None):
                 logger.debug(f'{commitish} fully reverted {reverted}')
             else:
                 commits[revert_commit.hash] = revert_commit
@@ -354,6 +353,13 @@ def apply_overrides(self, overrides):
                 continue
             commit = Commit(override_hash, override['short'], override.get('authors') or [])
             logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
+            if match := self.FIXES_RE.search(commit.short):
+                fix_commitish = match.group(1)
+                if fix_commitish in self._commits:
+                    del self._commits[commit.hash]
+                    self._fixes[fix_commitish].append(commit)
+                    logger.info(f'Found fix for {fix_commitish[:HASH_LENGTH]}: {commit.hash[:HASH_LENGTH]}')
+                    continue
             self._commits[commit.hash] = commit

         self._commits = dict(reversed(self._commits.items()))
@@ -374,7 +380,7 @@ def groups(self):
             issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []

             if prefix:
-                groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
+                groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')), strict=True)
                 group = next(iter(filter(None, groups)), None)
                 details = ', '.join(unique(details))
                 sub_details = list(itertools.chain.from_iterable(sub_details))
@@ -461,8 +467,7 @@ def create_changelog(args):

     logger.info(f'Loaded {len(commits)} commits')

-    new_contributors = get_new_contributors(args.contributors_path, commits)
-    if new_contributors:
+    if new_contributors := get_new_contributors(args.contributors_path, commits):
         if args.contributors:
             write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
         logger.info(f'New contributors: {", ".join(new_contributors)}')
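Most of these hunks apply the same refactor: a separate lookup-then-test pair collapses into an assignment expression. The pattern in isolation:

    lookup = {'core': 'Core'}

    # Before: two statements, `result` bound before the test.
    result = lookup.get('core')
    if result:
        print(result)

    # After: the walrus operator binds and tests in one expression.
    if result := lookup.get('core'):
        print(result)
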
@@ -8,7 +8,7 @@ def main():
         return  # This is unused in yt-dlp

     parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
-    options, args = parser.parse_args()
+    _, args = parser.parse_args()
     if len(args) != 2:
         parser.error('Expected an input and an output filename')
@@ -11,11 +11,13 @@

 from devscripts.utils import get_filename_args, read_file, write_file

-VERBOSE_TMPL = '''
+VERBOSE = '''
  - type: checkboxes
    id: verbose
    attributes:
      label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
      options:
        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
          required: true
@@ -32,14 +34,15 @@
      placeholder: |
        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
        [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
        <more lines>
@@ -49,20 +52,20 @@
 '''.strip()

 NO_SKIP = '''
-  - type: checkboxes
+  - type: markdown
    attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
 '''.strip()


 def main():
-    fields = {'no_skip': NO_SKIP}
-    fields['verbose'] = VERBOSE_TMPL % fields
-    fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])
+    fields = {
+        'no_skip': NO_SKIP,
+        'verbose': VERBOSE,
+        'verbose_optional': re.sub(r'(\n\s+validations:)?\n\s+required: true', '', VERBOSE),
+    }

     infile, outfile = get_filename_args(has_infile=True)
     write_file(outfile, read_file(infile) % fields)
@@ -2,7 +2,6 @@

 # Allow direct execution
 import os
-import shutil
 import sys

 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -11,6 +10,9 @@
 from inspect import getsource

 from devscripts.utils import get_filename_args, read_file, write_file
+from yt_dlp.extractor import import_extractors
+from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
+from yt_dlp.globals import extractors

 NO_ATTR = object()
 STATIC_CLASS_PROPERTIES = [
@@ -34,17 +36,12 @@ class {name}({bases}):


 def main():
+    os.environ['YTDLP_NO_PLUGINS'] = 'true'
+    os.environ['YTDLP_NO_LAZY_EXTRACTORS'] = 'true'
+
     lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
-    if os.path.exists(lazy_extractors_filename):
-        os.remove(lazy_extractors_filename)

-    _ALL_CLASSES = get_all_ies()  # Must be before import
-
-    import yt_dlp.plugins
-    from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
-
-    # Filter out plugins
-    _ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
+    import_extractors()

     DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
     module_src = '\n'.join((
@@ -52,26 +49,12 @@ def main():
         '    _module = None',
         *extra_ie_code(DummyInfoExtractor),
         '\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n    pass\n',
-        *build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
+        *build_ies(list(extractors.value.values()), (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
     ))

     write_file(lazy_extractors_filename, f'{module_src}\n')


-def get_all_ies():
-    PLUGINS_DIRNAME = 'ytdlp_plugins'
-    BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
-    if os.path.exists(PLUGINS_DIRNAME):
-        # os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
-        shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
-    try:
-        from yt_dlp.extractor.extractors import _ALL_CLASSES
-    finally:
-        if os.path.exists(BLOCKED_DIRNAME):
-            shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
-    return _ALL_CLASSES
-
-
 def extra_ie_code(ie, base=None):
     for var in STATIC_CLASS_PROPERTIES:
         val = getattr(ie, var)
@@ -92,7 +75,7 @@ def build_ies(ies, bases, attr_base):
         if ie in ies:
             names.append(ie.__name__)

-    yield f'\n_ALL_CLASSES = [{", ".join(names)}]'
+    yield '\n_CLASS_LOOKUP = {%s}' % ', '.join(f'{name!r}: {name}' for name in names)


 def sort_ies(ies, ignored_bases):
@@ -10,10 +10,21 @@
 from devscripts.utils import get_filename_args, write_file
 from yt_dlp.extractor import list_extractor_classes

+TEMPLATE = '''\
+# Supported sites
+
+Below is a list of all extractors that are currently included with yt-dlp.
+If a site is not listed here, it might still be supported by yt-dlp's embed extraction or generic extractor.
+Not all sites listed here are guaranteed to work; websites are constantly changing and sometimes this breaks yt-dlp's support for them.
+The only reliable way to check if a site is supported is to try it.
+
+{ie_list}
+'''
+

 def main():
     out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
-    write_file(get_filename_args(), f'# Supported sites\n{out}\n')
+    write_file(get_filename_args(), TEMPLATE.format(ie_list=out))


 if __name__ == '__main__':
@@ -16,7 +16,19 @@
 def parse_args():
     parser = argparse.ArgumentParser(description='Run selected yt-dlp tests')
     parser.add_argument(
-        'test', help='a extractor tests, or one of "core" or "download"', nargs='*')
+        'test', help='an extractor test, test path, or one of "core" or "download"', nargs='*')
+    parser.add_argument(
+        '--flaky',
+        action='store_true',
+        default=None,
+        help='Allow running flaky tests. (default: run, unless in CI)',
+    )
+    parser.add_argument(
+        '--no-flaky',
+        action='store_false',
+        dest='flaky',
+        help=argparse.SUPPRESS,
+    )
     parser.add_argument(
         '-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION')
     parser.add_argument(
@@ -24,10 +36,11 @@ def parse_args():
     return parser.parse_args()


-def run_tests(*tests, pattern=None, ci=False):
-    run_core = 'core' in tests or (not pattern and not tests)
+def run_tests(*tests, pattern=None, ci=False, flaky: bool | None = None):
+    # XXX: hatch uses `tests` if no arguments are passed
+    run_core = 'core' in tests or 'tests' in tests or (not pattern and not tests)
     run_download = 'download' in tests
-    tests = list(map(fix_test_name, tests))
+    run_flaky = flaky or (flaky is None and not ci)

     pytest_args = args.pytest_args or os.getenv('HATCH_TEST_ARGS', '')
     arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)]
@@ -41,7 +54,11 @@ def run_tests(*tests, pattern=None, ci=False):
         arguments.extend(['-m', 'download'])
     else:
         arguments.extend(
-            f'test/test_download.py::TestDownload::test_{test}' for test in tests)
+            test if '/' in test
+            else f'test/test_download.py::TestDownload::test_{fix_test_name(test)}'
+            for test in tests)
+    if not run_flaky:
+        arguments.append('--disallow-flaky')

     print(f'Running {arguments}', flush=True)
     try:
@@ -70,6 +87,11 @@ def run_tests(*tests, pattern=None, ci=False):
         args = parse_args()

         os.chdir(Path(__file__).parent.parent)
-        sys.exit(run_tests(*args.test, pattern=args.k, ci=bool(os.getenv('CI'))))
+        sys.exit(run_tests(
+            *args.test,
+            pattern=args.k,
+            ci=bool(os.getenv('CI')),
+            flaky=args.flaky,
+        ))
     except KeyboardInterrupt:
         pass
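A hedged sketch of the extended entry point, matching the new keyword-only parameter (flaky=None means "run flaky tests unless in CI"):

    from devscripts.run_tests import run_tests

    # Run the core suite, explicitly disallowing flaky tests as CI would.
    run_tests('core', pattern=None, ci=True, flaky=False)
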
157
devscripts/setup_variables.py
Normal file
157
devscripts/setup_variables.py
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
|
||||||
|
from devscripts.utils import calculate_version
|
||||||
|
|
||||||
|
|
||||||
|
STABLE_REPOSITORY = 'yt-dlp/yt-dlp'
|
||||||
|
|
||||||
|
|
||||||
|
def setup_variables(environment):
|
||||||
|
"""
|
||||||
|
`environment` must contain these keys:
|
||||||
|
REPOSITORY, INPUTS, PROCESSED,
|
||||||
|
PUSH_VERSION_COMMIT, PYPI_PROJECT,
|
||||||
|
SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
|
||||||
|
TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
|
||||||
|
SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
|
||||||
|
HAS_SOURCE_ARCHIVE_REPO_TOKEN,
|
||||||
|
HAS_TARGET_ARCHIVE_REPO_TOKEN,
|
||||||
|
HAS_ARCHIVE_REPO_TOKEN
|
||||||
|
|
||||||
|
`INPUTS` must contain these keys:
|
||||||
|
prerelease
|
||||||
|
|
||||||
|
`PROCESSED` must contain these keys:
|
||||||
|
source_repo, source_tag,
|
||||||
|
target_repo, target_tag
|
||||||
|
"""
|
||||||
|
REPOSITORY = environment['REPOSITORY']
|
||||||
|
INPUTS = json.loads(environment['INPUTS'])
|
||||||
|
PROCESSED = json.loads(environment['PROCESSED'])
|
||||||
|
|
||||||
|
source_channel = None
|
||||||
|
does_not_have_needed_token = False
|
||||||
|
target_repo_token = None
|
||||||
|
pypi_project = None
|
||||||
|
pypi_suffix = None
|
||||||
|
|
||||||
|
source_repo = PROCESSED['source_repo']
|
||||||
|
source_tag = PROCESSED['source_tag']
|
||||||
|
if source_repo == 'stable':
|
||||||
|
source_repo = STABLE_REPOSITORY
|
||||||
|
if not source_repo:
|
||||||
|
source_repo = REPOSITORY
|
||||||
|
elif environment['SOURCE_ARCHIVE_REPO']:
|
||||||
|
source_channel = environment['SOURCE_ARCHIVE_REPO']
|
||||||
|
elif not source_tag and '/' not in source_repo:
|
||||||
|
source_tag = source_repo
|
||||||
|
source_repo = REPOSITORY
|
||||||
|
|
||||||
|
resolved_source = source_repo
|
||||||
|
if source_tag:
|
||||||
|
resolved_source = f'{resolved_source}@{source_tag}'
|
||||||
|
elif source_repo == STABLE_REPOSITORY:
|
||||||
|
resolved_source = 'stable'
|
||||||
|
|
||||||
|
revision = None
|
||||||
|
if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
|
||||||
|
revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')
|
||||||
|
|
||||||
|
version = calculate_version(INPUTS.get('version') or revision)
|
||||||
|
|
||||||
|
target_repo = PROCESSED['target_repo']
|
||||||
|
target_tag = PROCESSED['target_tag']
|
||||||
|
if target_repo:
|
||||||
|
if target_repo == 'stable':
|
||||||
|
target_repo = STABLE_REPOSITORY
|
||||||
|
if not target_tag:
|
||||||
|
if target_repo == STABLE_REPOSITORY:
|
||||||
|
target_tag = version
|
||||||
|
elif environment['TARGET_ARCHIVE_REPO']:
|
||||||
|
target_tag = source_tag or version
|
||||||
|
else:
|
||||||
|
target_tag = target_repo
|
||||||
|
target_repo = REPOSITORY
|
||||||
|
if target_repo != REPOSITORY:
|
||||||
|
target_repo = environment['TARGET_ARCHIVE_REPO']
|
||||||
|
target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
|
||||||
|
if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
|
||||||
|
does_not_have_needed_token = True
|
||||||
|
pypi_project = environment['TARGET_PYPI_PROJECT'] or None
|
||||||
|
pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
|
||||||
|
else:
|
||||||
|
target_tag = source_tag or version
|
||||||
|
if source_channel:
|
||||||
|
target_repo = source_channel
|
||||||
|
target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
|
||||||
|
if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
|
||||||
|
does_not_have_needed_token = True
|
||||||
|
pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
|
||||||
|
pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
|
||||||
|
else:
|
||||||
|
target_repo = REPOSITORY
|
||||||
|
|
||||||
|
if does_not_have_needed_token:
|
||||||
|
if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
|
||||||
|
print(f'::error::Repository access secret {target_repo_token} not found')
|
||||||
|
return None
|
||||||
|
target_repo_token = 'ARCHIVE_REPO_TOKEN'
|
||||||
|
|
||||||
|
if target_repo == REPOSITORY and not INPUTS['prerelease']:
|
||||||
|
pypi_project = environment['PYPI_PROJECT'] or None
|
||||||
|
|
||||||
|
return {
|
||||||
|
'channel': resolved_source,
|
||||||
|
'version': version,
|
||||||
|
'target_repo': target_repo,
|
||||||
|
'target_repo_token': target_repo_token,
|
||||||
|
'target_tag': target_tag,
|
||||||
|
'pypi_project': pypi_project,
|
||||||
|
'pypi_suffix': pypi_suffix,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def process_inputs(inputs):
|
||||||
|
outputs = {}
|
||||||
|
for key in ('source', 'target'):
|
||||||
|
repo, _, tag = inputs.get(key, '').partition('@')
|
||||||
|
outputs[f'{key}_repo'] = repo
|
||||||
|
outputs[f'{key}_tag'] = tag
|
||||||
|
return outputs
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
if not os.getenv('GITHUB_OUTPUT'):
|
||||||
|
print('This script is only intended for use with GitHub Actions', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if 'process_inputs' in sys.argv:
|
||||||
|
inputs = json.loads(os.environ['INPUTS'])
|
||||||
|
print('::group::Inputs')
|
||||||
|
print(json.dumps(inputs, indent=2))
|
||||||
|
print('::endgroup::')
|
||||||
|
outputs = process_inputs(inputs)
|
||||||
|
print('::group::Processed')
|
||||||
|
print(json.dumps(outputs, indent=2))
|
||||||
|
print('::endgroup::')
|
||||||
|
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||||
|
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
outputs = setup_variables(dict(os.environ))
|
||||||
|
if not outputs:
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
print('::group::Output variables')
|
||||||
|
print(json.dumps(outputs, indent=2))
|
||||||
|
print('::endgroup::')
|
||||||
|
|
||||||
|
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||||
|
f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
|
||||||
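The '@'-splitting contract of process_inputs is easiest to see on an example; this mirrors what the tests below feed into setup_variables:

    from devscripts.setup_variables import process_inputs

    # Each of 'source' and 'target' is split into a repo and an optional tag.
    print(process_inputs({'source': 'nightly', 'target': 'stable@experimental'}))
    # {'source_repo': 'nightly', 'source_tag': '', 'target_repo': 'stable', 'target_tag': 'experimental'}
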
devscripts/setup_variables_tests.py (new file, 324 lines)
@@ -0,0 +1,324 @@
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
from devscripts.utils import calculate_version


def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
    inp = inputs.copy()
    inp.setdefault('linux_armv7l', True)
    inp.setdefault('prerelease', False)
    processed = process_inputs(inp)
    source_repo = processed['source_repo'].upper()
    target_repo = processed['target_repo'].upper()
    variables = {k.upper(): v for k, v in repo_vars.items()}
    secrets = {k.upper(): v for k, v in repo_secrets.items()}

    env = {
        # Keep this in sync with prepare.setup_variables in release.yml
        'INPUTS': json.dumps(inp),
        'PROCESSED': json.dumps(processed),
        'REPOSITORY': github_repository,
        'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
        'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
        'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
        'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
        'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
        'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
        'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
        'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
        'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
    }

    result = setup_variables(env)
    if not expected:
        print('    {\n' + '\n'.join(f'        {k!r}: {v!r},' for k, v in result.items()) + '\n    }')
        return

    exp = expected.copy()
    if ignore_revision:
        assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
        version_is_tag = result['version'] == result['target_tag']
        for dct in (result, exp):
            dct['version'] = '.'.join(dct['version'].split('.')[:3])
            if version_is_tag:
                dct['target_tag'] = dct['version']
    assert result == exp, f'unexpected result: {github_repository} {note}'


def test_setup_variables():
    DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
    DEFAULT_VERSION = calculate_version()
    BASE_REPO_VARS = {
        'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
        'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
        'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
        'NIGHTLY_PYPI_SUFFIX': 'dev',
        'PUSH_VERSION_COMMIT': '1',
        'PYPI_PROJECT': 'yt-dlp',
    }
    BASE_REPO_SECRETS = {
        'ARCHIVE_REPO_TOKEN': '1',
    }
    FORK_REPOSITORY = 'fork/yt-dlp'
    FORK_ORG = FORK_REPOSITORY.partition('/')[0]

    _test(
        STABLE_REPOSITORY, 'official vars/secrets, stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
            'channel': 'stable',
            'version': DEFAULT_VERSION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': None,
        })
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': f'{STABLE_REPOSITORY}@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': 'stable@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
        {}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
        {}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
        {}, {}, {
            'prerelease': True,
            'source': 'nightly',
            'target': 'nightly',
        }, {
            'channel': f'{FORK_REPOSITORY}@nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'nightly',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, master',
        {}, {}, {
            'prerelease': True,
            'source': 'master',
            'target': 'master',
        }, {
            'channel': f'{FORK_REPOSITORY}@master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'master',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
        {}, {}, {'version': '123'}, {
            'channel': FORK_REPOSITORY,
            'version': f'{DEFAULT_VERSION[:10]}.123',
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': f'{DEFAULT_VERSION[:10]}.123',
            'pypi_project': None,
            'pypi_suffix': None,
        })

    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': None,
            'pypi_suffix': None,
        })
    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
            'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'PYPI_PROJECT': 'yt-dlp-test',
        }, {
            'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
            'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
            'MASTER_PYPI_PROJECT': 'yt-dlp-test',
            'MASTER_PYPI_SUFFIX': 'dev',
        }, {
            'MASTER_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-master-builds',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-master-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
            'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp-test',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag',
        {}, {}, {'source': 'experimental'}, {
            'channel': f'{FORK_REPOSITORY}@experimental',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
        {}, {}, {
            'prerelease': True,
            'source': 'stable',
            'target': 'experimental',
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
@@ -9,24 +9,9 @@
 import argparse
 import contextlib
-import datetime as dt
 import sys

-from devscripts.utils import read_version, run_process, write_file
+from devscripts.utils import calculate_version, run_process, write_file


-def get_new_version(version, revision):
-    if not version:
-        version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
-
-    if revision:
-        assert revision.isdecimal(), 'Revision must be a number'
-    else:
-        old_version = read_version().split('.')
-        if version.split('.') == old_version[:3]:
-            revision = str(int(([*old_version, 0])[3]) + 1)
-
-    return f'{version}.{revision}' if revision else version
-
-
 def get_git_head():
@@ -72,9 +57,7 @@ def get_git_head():
 args = parser.parse_args()

 git_head = get_git_head()
-version = (
-    args.version if args.version and '.' in args.version
-    else get_new_version(None, args.version))
+version = calculate_version(args.version)
 write_file(args.output, VERSION_TEMPLATE.format(
     version=version, git_head=git_head, channel=args.channel, origin=args.origin,
     package_version=f'{version}{args.suffix}'))
@@ -20,7 +20,9 @@
     '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
     help='path to the Changelog file')
 args = parser.parse_args()
-new_entry = create_changelog(args)

 header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
-write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
+current_version = read_version()
+if current_version != changelog.splitlines()[0]:
+    new_entry = create_changelog(args)
+    write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')
devscripts/update_ejs.py (new executable file, 166 lines)
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import contextlib
+import io
+import json
+import hashlib
+import pathlib
+import urllib.request
+import zipfile
+
+
+TEMPLATE = '''\
+# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY!
+
+VERSION = {version!r}
+HASHES = {{
+{hash_mapping}
+}}
+'''
+PREFIX = '    "yt-dlp-ejs=='
+BASE_PATH = pathlib.Path(__file__).parent.parent
+PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
+PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
+RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'
+ASSETS = {
+    'yt.solver.lib.js': False,
+    'yt.solver.lib.min.js': False,
+    'yt.solver.deno.lib.js': True,
+    'yt.solver.bun.lib.js': True,
+    'yt.solver.core.min.js': False,
+    'yt.solver.core.js': True,
+}
+MAKEFILE_PATH = BASE_PATH / 'Makefile'
+
+
+def request(url: str):
+    return contextlib.closing(urllib.request.urlopen(url))
+
+
+def makefile_variables(
+    version: str | None = None,
+    name: str | None = None,
+    digest: str | None = None,
+    data: bytes | None = None,
+    keys_only: bool = False,
+) -> dict[str, str | None]:
+    assert keys_only or all(arg is not None for arg in (version, name, digest, data))
+
+    return {
+        'EJS_VERSION': None if keys_only else version,
+        'EJS_WHEEL_NAME': None if keys_only else name,
+        'EJS_WHEEL_HASH': None if keys_only else digest,
+        'EJS_PY_FOLDERS': None if keys_only else list_wheel_contents(data, 'py', files=False),
+        'EJS_PY_FILES': None if keys_only else list_wheel_contents(data, 'py', folders=False),
+        'EJS_JS_FOLDERS': None if keys_only else list_wheel_contents(data, 'js', files=False),
+        'EJS_JS_FILES': None if keys_only else list_wheel_contents(data, 'js', folders=False),
+    }
+
+
+def list_wheel_contents(
+    wheel_data: bytes,
+    suffix: str | None = None,
+    folders: bool = True,
+    files: bool = True,
+) -> str:
+    assert folders or files, 'at least one of "folders" or "files" must be True'
+
+    with zipfile.ZipFile(io.BytesIO(wheel_data)) as zipf:
+        path_gen = (zinfo.filename for zinfo in zipf.infolist())
+
+    filtered = filter(lambda path: path.startswith('yt_dlp_ejs/'), path_gen)
+    if suffix:
+        filtered = filter(lambda path: path.endswith(f'.{suffix}'), filtered)
+
+    files_list = list(filtered)
+    if not folders:
+        return ' '.join(files_list)
+
+    folders_list = list(dict.fromkeys(path.rpartition('/')[0] for path in files_list))
+    if not files:
+        return ' '.join(folders_list)
+
+    return ' '.join(folders_list + files_list)
+
+
+def main():
+    current_version = None
+    with PYPROJECT_PATH.open() as file:
+        for line in file:
+            if not line.startswith(PREFIX):
+                continue
+            current_version, _, _ = line.removeprefix(PREFIX).partition('"')
+
+    if not current_version:
+        print('yt-dlp-ejs dependency line could not be found')
+        return
+
+    makefile_info = makefile_variables(keys_only=True)
+    prefixes = tuple(f'{key} = ' for key in makefile_info)
+    with MAKEFILE_PATH.open() as file:
+        for line in file:
+            if not line.startswith(prefixes):
+                continue
+            key, _, val = line.partition(' = ')
+            makefile_info[key] = val.rstrip()
+
+    with request(RELEASE_URL) as resp:
+        info = json.load(resp)
+
+    version = info['tag_name']
+    if version == current_version:
+        print(f'yt-dlp-ejs is up to date! ({version})')
+        return
+
+    print(f'Updating yt-dlp-ejs from {current_version} to {version}')
+    hashes = []
+    wheel_info = {}
+    for asset in info['assets']:
+        name = asset['name']
+        is_wheel = name.startswith('yt_dlp_ejs-') and name.endswith('.whl')
+        if not is_wheel and name not in ASSETS:
+            continue
+        with request(asset['browser_download_url']) as resp:
+            data = resp.read()
+
+        # verify digest from github
+        digest = asset['digest']
+        algo, _, expected = digest.partition(':')
+        hexdigest = hashlib.new(algo, data).hexdigest()
+        assert hexdigest == expected, f'downloaded attest mismatch ({hexdigest!r} != {expected!r})'
+
+        if is_wheel:
+            wheel_info = makefile_variables(version, name, digest, data)
+            continue
+
+        # calculate sha3-512 digest
+        asset_hash = hashlib.sha3_512(data).hexdigest()
+        hashes.append(f'    {name!r}: {asset_hash!r},')
+
+        if ASSETS[name]:
+            (PACKAGE_PATH / name).write_bytes(data)
+
+    hash_mapping = '\n'.join(hashes)
+    for asset_name in ASSETS:
+        assert asset_name in hash_mapping, f'{asset_name} not found in release'
+
+    assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'
+
+    (PACKAGE_PATH / '_info.py').write_text(TEMPLATE.format(
+        version=version,
+        hash_mapping=hash_mapping,
+    ))
+
+    content = PYPROJECT_PATH.read_text()
+    updated = content.replace(PREFIX + current_version, PREFIX + version)
+    PYPROJECT_PATH.write_text(updated)
+
+    makefile = MAKEFILE_PATH.read_text()
+    for key in wheel_info:
+        makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
+    MAKEFILE_PATH.write_text(makefile)


+if __name__ == '__main__':
+    main()
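Note: the digest verification in main() above is the part worth understanding before trusting the script. A minimal standalone sketch of the same check, with a made-up payload standing in for a downloaded release asset (the real script reads the digest from the GitHub API's asset['digest'] field):

    import hashlib

    # GitHub exposes release-asset digests as '<algo>:<hexdigest>'
    data = b'example payload'  # stand-in for the downloaded asset bytes
    digest = 'sha256:' + hashlib.sha256(data).hexdigest()  # stand-in for asset['digest']

    algo, _, expected = digest.partition(':')
    assert hashlib.new(algo, data).hexdigest() == expected, 'digest mismatch'
    print(f'verified {len(data)} bytes via {algo}')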
@@ -1,5 +1,7 @@
 import argparse
+import datetime as dt
 import functools
+import re
 import subprocess
@@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
     return items[varname]


+def calculate_version(version=None, fname='yt_dlp/version.py'):
+    if version and '.' in version:
+        return version
+
+    revision = version
+    version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
+
+    if revision:
+        assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
+    else:
+        old_version = read_version(fname=fname).split('.')
+        if version.split('.') == old_version[:3]:
+            revision = str(int(([*old_version, 0])[3]) + 1)
+
+    return f'{version}.{revision}' if revision else version
+
+
 def get_filename_args(has_infile=False, default_outfile=None):
     parser = argparse.ArgumentParser()
     if has_infile:
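Note: the resolution rules of the new calculate_version() helper are easiest to see from a few calls. A hedged sketch, assuming today is 2025-12-01 and yt_dlp/version.py currently reads 2025.12.01.1:

    from devscripts.utils import calculate_version

    calculate_version('2024.07.16')  # full version given: returned unchanged
    calculate_version('123')         # bare number: treated as a revision -> '2025.12.01.123'
    calculate_version()              # same-day re-release: revision auto-bumped -> '2025.12.01.2'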
@@ -18,6 +18,7 @@ def build_completion(opt_parser):
             for opt in group.option_list]
     opts_file = [opt for opt in opts if opt.metavar == 'FILE']
     opts_dir = [opt for opt in opts if opt.metavar == 'DIR']
+    opts_path = [opt for opt in opts if opt.metavar == 'PATH']

     fileopts = []
     for opt in opts_file:
@@ -26,6 +27,12 @@ def build_completion(opt_parser):
         if opt._long_opts:
             fileopts.extend(opt._long_opts)

+    for opt in opts_path:
+        if opt._short_opts:
+            fileopts.extend(opt._short_opts)
+        if opt._long_opts:
+            fileopts.extend(opt._long_opts)
+
     diropts = []
     for opt in opts_dir:
         if opt._short_opts:
@@ -1,11 +1,14 @@
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling>=1.27.0"]
 build-backend = "hatchling.build"

 [project]
 name = "yt-dlp"
-maintainers = [
+authors = [
     {name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
+]
+maintainers = [
+    {email = "maintainers@yt-dlp.org"},
     {name = "Grub4K", email = "contact@grub4k.xyz"},
     {name = "bashonly", email = "bashonly@protonmail.com"},
     {name = "coletdjnz", email = "coletdjnz@protonmail.com"},
@@ -13,50 +16,50 @@ maintainers = [
 ]
 description = "A feature-rich command-line audio/video downloader"
 readme = "README.md"
-requires-python = ">=3.8"
+requires-python = ">=3.10"
 keywords = [
+    "cli",
+    "downloader",
     "youtube-dl",
-    "video-downloader",
     "youtube-downloader",
     "sponsorblock",
-    "youtube-dlc",
     "yt-dlp",
 ]
-license = {file = "LICENSE"}
+license = "Unlicense"
+license-files = ["LICENSE"]
 classifiers = [
     "Topic :: Multimedia :: Video",
     "Development Status :: 5 - Production/Stable",
     "Environment :: Console",
     "Programming Language :: Python",
     "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.14",
     "Programming Language :: Python :: Implementation",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: PyPy",
-    "License :: OSI Approved :: The Unlicense (Unlicense)",
     "Operating System :: OS Independent",
 ]
 dynamic = ["version"]
-dependencies = [
+dependencies = []
+
+[project.optional-dependencies]
+default = [
     "brotli; implementation_name=='cpython'",
     "brotlicffi; implementation_name!='cpython'",
     "certifi",
     "mutagen",
     "pycryptodomex",
     "requests>=2.32.2,<3",
-    "urllib3>=1.26.17,<3",
-    "websockets>=12.0",
+    "urllib3>=2.0.2,<3",
+    "websockets>=13.0",
+    "yt-dlp-ejs==0.3.1",
 ]
-
-[project.optional-dependencies]
-default = []
 curl-cffi = [
-    "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",
-    "curl-cffi>=0.5.10,!=0.6.*,<0.8; os_name!='nt' and implementation_name=='cpython'",
+    "curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.14; implementation_name=='cpython'",
 ]
 secretstorage = [
     "cffi",
@@ -64,9 +67,9 @@ secretstorage = [
 ]
 build = [
     "build",
-    "hatchling",
+    "hatchling>=1.27.0",
     "pip",
-    "setuptools",
+    "setuptools>=71.0.2",
     "wheel",
 ]
 dev = [
@@ -76,23 +79,21 @@ dev = [
 ]
 static-analysis = [
     "autopep8~=2.0",
-    "ruff~=0.5.0",
+    "ruff~=0.14.0",
 ]
 test = [
     "pytest~=8.1",
+    "pytest-rerunfailures~=14.0",
 ]
 pyinstaller = [
-    "pyinstaller>=6.7.0",  # for compat with setuptools>=70
-]
-py2exe = [
-    "py2exe>=0.12",
+    "pyinstaller>=6.17.0",  # 6.17.0+ needed for compat with setuptools 81+
 ]

 [project.urls]
 Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
 Repository = "https://github.com/yt-dlp/yt-dlp"
 Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
-Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
+Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers"

 [project.scripts]
 yt-dlp = "yt_dlp:main"
@@ -110,7 +111,6 @@ include = [
     "/LICENSE",  # included as license
     "/pyproject.toml",  # included by default
     "/README.md",  # included as readme
-    "/setup.cfg",
     "/supportedsites.md",
 ]
 artifacts = [
@@ -123,7 +123,12 @@ artifacts = [

 [tool.hatch.build.targets.wheel]
 packages = ["yt_dlp"]
-artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]
+artifacts = [
+    "/yt_dlp/extractor/lazy_extractors.py",
+]
+exclude = [
+    "/yt_dlp/**/*.md",
+]

 [tool.hatch.build.targets.wheel.shared-data]
 "completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
@@ -162,7 +167,6 @@ lint-fix = "ruff check --fix {args:.}"
 features = ["test"]
 dependencies = [
     "pytest-randomly~=3.15",
-    "pytest-rerunfailures~=14.0",
     "pytest-xdist[psutil]~=3.5",
 ]
@@ -172,14 +176,12 @@ run-cov = "echo Code coverage not implemented && exit 1"

 [[tool.hatch.envs.hatch-test.matrix]]
 python = [
-    "3.8",
-    "3.9",
     "3.10",
     "3.11",
     "3.12",
-    "pypy3.8",
-    "pypy3.9",
-    "pypy3.10",
+    "3.13",
+    "3.14",
+    "pypy3.11",
 ]

 [tool.ruff]
@@ -191,6 +193,7 @@ ignore = [
     "E501",  # line-too-long
     "E731",  # lambda-assignment
     "E741",  # ambiguous-variable-name
+    "UP031",  # printf-string-formatting
     "UP036",  # outdated-version-block
     "B006",  # mutable-argument-default
     "B008",  # function-call-in-default-argument
@@ -199,6 +202,7 @@ ignore = [
     "B023",  # function-uses-loop-variable (false positives)
     "B028",  # no-explicit-stacklevel
     "B904",  # raise-without-from-inside-except
+    "A005",  # stdlib-module-shadowing
     "C401",  # unnecessary-generator-set
     "C402",  # unnecessary-generator-dict
     "PIE790",  # unnecessary-placeholder
@@ -214,10 +218,12 @@ ignore = [
     "TD001",  # invalid-todo-tag
     "TD002",  # missing-todo-author
     "TD003",  # missing-todo-link
+    "PLC0415",  # import-outside-top-level
     "PLE0604",  # invalid-all-object (false positives)
     "PLE0643",  # potential-index-error (false positives)
     "PLW0603",  # global-statement
     "PLW1510",  # subprocess-run-without-check
+    "PLW1641",  # eq-without-hash
     "PLW2901",  # redefined-loop-name
     "RUF001",  # ambiguous-unicode-character-string
     "RUF012",  # mutable-class-default
@@ -263,9 +269,6 @@ select = [
     "A002",  # builtin-argument-shadowing
     "C408",  # unnecessary-collection-call
 ]
-"yt_dlp/jsinterp.py" = [
-    "UP031",  # printf-string-formatting
-]

 [tool.ruff.lint.isort]
 known-first-party = [
@@ -318,6 +321,17 @@ banned-from = [
 "yt_dlp.compat.compat_urllib_parse_urlparse".msg = "Use `urllib.parse.urlparse` instead."
 "yt_dlp.compat.compat_shlex_quote".msg = "Use `yt_dlp.utils.shell_quote` instead."
 "yt_dlp.utils.error_to_compat_str".msg = "Use `str` instead."
+"yt_dlp.utils.bytes_to_intlist".msg = "Use `list` instead."
+"yt_dlp.utils.intlist_to_bytes".msg = "Use `bytes` instead."
+"yt_dlp.utils.jwt_encode_hs256".msg = "Use `yt_dlp.utils.jwt_encode` instead."
+"yt_dlp.utils.decodeArgument".msg = "Do not use"
+"yt_dlp.utils.decodeFilename".msg = "Do not use"
+"yt_dlp.utils.encodeFilename".msg = "Do not use"
+"yt_dlp.compat.compat_os_name".msg = "Use `os.name` instead."
+"yt_dlp.compat.compat_realpath".msg = "Use `os.path.realpath` instead."
+"yt_dlp.compat.functools".msg = "Use `functools` instead."
+"yt_dlp.utils.decodeOption".msg = "Do not use"
+"yt_dlp.utils.compiled_regex_type".msg = "Use `re.Pattern` instead."

 [tool.autopep8]
 max_line_length = 120
@@ -380,9 +394,14 @@ select = [
     "W391",
     "W504",
 ]
+exclude = "*/extractor/lazy_extractors.py,*venv*,*/test/testdata/sigs/player-*.js,.idea,.vscode"

 [tool.pytest.ini_options]
-addopts = "-ra -v --strict-markers"
+addopts = [
+    "-ra",  # summary: all except passed
+    "--verbose",
+    "--strict-markers",
+]
 markers = [
     "download",
 ]
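Note: with the runtime dependencies moved from the bare distribution into the `default` extra, a plain install of this branch pulls in no runtime dependencies at all. A hedged way to confirm what a built wheel actually declares (this inspection snippet is not part of the diff; it assumes yt-dlp built from this branch is installed):

    from importlib.metadata import metadata

    md = metadata('yt-dlp')
    print(md.get_all('Provides-Extra'))  # expect 'default', 'curl-cffi', 'secretstorage', ...
    print(md.get_all('Requires-Dist'))   # deps should only appear behind `extra == "..."` markers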
setup.cfg (39 lines, deleted)
@@ -1,39 +0,0 @@
-[flake8]
-exclude = build,venv,.tox,.git,.pytest_cache
-ignore = E402,E501,E731,E741,W503
-max_line_length = 120
-per_file_ignores =
-  devscripts/lazy_load_template.py: F401
-
-
-[autoflake]
-ignore-init-module-imports = true
-ignore-pass-after-docstring = true
-remove-all-unused-imports = true
-remove-duplicate-keys = true
-remove-unused-variables = true
-
-
-[tox:tox]
-skipsdist = true
-envlist = py{38,39,310,311,312},pypy{38,39,310}
-skip_missing_interpreters = true
-
-[testenv]  # tox
-deps =
-    pytest
-commands = pytest {posargs:"-m not download"}
-passenv = HOME  # For test_compat_expanduser
-setenv =
-    # PYTHONWARNINGS = error  # Catches PIP's warnings too
-
-
-[isort]
-py_version = 38
-multi_line_output = VERTICAL_HANGING_INDENT
-line_length = 80
-reverse_relative = true
-ensure_newline_before_comments = true
-include_trailing_comma = true
-known_first_party =
-    test

File diff suppressed because it is too large
@@ -52,6 +52,33 @@ def skip_handlers_if(request, handler):
     pytest.skip(marker.args[1] if len(marker.args) > 1 else '')


+@pytest.fixture(autouse=True)
+def handler_flaky(request, handler):
+    """Mark a certain handler as being flaky.
+
+    This will skip the test if pytest gets run with `--disallow-flaky`
+
+    usage:
+        pytest.mark.handler_flaky('my_handler', os.name != 'nt', reason='reason')
+    """
+    for marker in request.node.iter_markers(handler_flaky.__name__):
+        if (
+            marker.args[0] == handler.RH_KEY
+            and (not marker.args[1:] or any(marker.args[1:]))
+            and request.config.getoption('disallow_flaky')
+        ):
+            reason = marker.kwargs.get('reason')
+            pytest.skip(f'flaky: {reason}' if reason else 'flaky')
+
+
+def pytest_addoption(parser, pluginmanager):
+    parser.addoption(
+        '--disallow-flaky',
+        action='store_true',
+        help='disallow flaky tests from running.',
+    )
+
+
 def pytest_configure(config):
     config.addinivalue_line(
         'markers', 'skip_handler(handler): skip test for the given handler',
@@ -62,3 +89,6 @@ def pytest_configure(config):
     config.addinivalue_line(
         'markers', 'skip_handlers_if(handler): skip test for handlers when the condition is true',
     )
+    config.addinivalue_line(
+        'markers', 'handler_flaky(handler): mark handler as flaky if condition is true',
+    )
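Note: a hedged sketch of how a test opts in to the new marker, following the usage line in the fixture's docstring (the handler name and condition below are illustrative, not taken from the diff):

    import os

    import pytest


    @pytest.mark.handler_flaky('MyHandler', os.name == 'nt', reason='handshake is flaky on Windows')
    def test_something(handler):
        ...  # skipped for this handler when pytest is run with --disallow-flaky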
test/helper.py (201 lines changed)
@@ -9,7 +9,6 @@
 import yt_dlp.extractor
 from yt_dlp import YoutubeDL
-from yt_dlp.compat import compat_os_name
 from yt_dlp.utils import preferredencoding, try_call, write_string, find_available_port

 if 'pytest' in sys.modules:
@@ -49,7 +48,7 @@ def report_warning(message, *args, **kwargs):
     Print the message to stderr, it will be prefixed with 'WARNING:'
     If stderr is a tty file the 'WARNING:' will be colored
     """
-    if sys.stderr.isatty() and compat_os_name != 'nt':
+    if sys.stderr.isatty() and os.name != 'nt':
         _msg_header = '\033[0;33mWARNING:\033[0m'
     else:
         _msg_header = 'WARNING:'
@@ -102,87 +101,109 @@ def getwebpagetestcases():
 md5 = lambda s: hashlib.md5(s.encode()).hexdigest()


-def expect_value(self, got, expected, field):
-    if isinstance(expected, str) and expected.startswith('re:'):
-        match_str = expected[len('re:'):]
-        match_rex = re.compile(match_str)
-
-        self.assertTrue(
-            isinstance(got, str),
-            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
-        self.assertTrue(
-            match_rex.match(got),
-            f'field {field} (value: {got!r}) should match {match_str!r}')
-    elif isinstance(expected, str) and expected.startswith('startswith:'):
-        start_str = expected[len('startswith:'):]
-        self.assertTrue(
-            isinstance(got, str),
-            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
-        self.assertTrue(
-            got.startswith(start_str),
-            f'field {field} (value: {got!r}) should start with {start_str!r}')
-    elif isinstance(expected, str) and expected.startswith('contains:'):
-        contains_str = expected[len('contains:'):]
-        self.assertTrue(
-            isinstance(got, str),
-            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
-        self.assertTrue(
-            contains_str in got,
-            f'field {field} (value: {got!r}) should contain {contains_str!r}')
-    elif isinstance(expected, type):
-        self.assertTrue(
-            isinstance(got, expected),
-            f'Expected type {expected!r} for field {field}, but got value {got!r} of type {type(got)!r}')
-    elif isinstance(expected, dict) and isinstance(got, dict):
-        expect_dict(self, got, expected)
-    elif isinstance(expected, list) and isinstance(got, list):
-        self.assertEqual(
-            len(expected), len(got),
-            f'Expect a list of length {len(expected)}, but got a list of length {len(got)} for field {field}')
-        for index, (item_got, item_expected) in enumerate(zip(got, expected)):
-            type_got = type(item_got)
-            type_expected = type(item_expected)
-            self.assertEqual(
-                type_expected, type_got,
-                f'Type mismatch for list item at index {index} for field {field}, '
-                f'expected {type_expected!r}, got {type_got!r}')
-            expect_value(self, item_got, item_expected, field)
-    else:
-        if isinstance(expected, str) and expected.startswith('md5:'):
-            self.assertTrue(
-                isinstance(got, str),
-                f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
-            got = 'md5:' + md5(got)
-        elif isinstance(expected, str) and re.match(r'^(?:min|max)?count:\d+', expected):
-            self.assertTrue(
-                isinstance(got, (list, dict)),
-                f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
-            op, _, expected_num = expected.partition(':')
-            expected_num = int(expected_num)
-            if op == 'mincount':
-                assert_func = assertGreaterEqual
-                msg_tmpl = 'Expected %d items in field %s, but only got %d'
-            elif op == 'maxcount':
-                assert_func = assertLessEqual
-                msg_tmpl = 'Expected maximum %d items in field %s, but got %d'
-            elif op == 'count':
-                assert_func = assertEqual
-                msg_tmpl = 'Expected exactly %d items in field %s, but got %d'
-            else:
-                assert False
-            assert_func(
-                self, len(got), expected_num,
-                msg_tmpl % (expected_num, field, len(got)))
-            return
-        self.assertEqual(
-            expected, got,
-            f'Invalid value for field {field}, expected {expected!r}, got {got!r}')
+def _iter_differences(got, expected, field):
+    if isinstance(expected, str):
+        op, _, val = expected.partition(':')
+        if op in ('mincount', 'maxcount', 'count'):
+            if not isinstance(got, (list, dict)):
+                yield field, f'expected either {list.__name__} or {dict.__name__}, got {type(got).__name__}'
+                return
+
+            expected_num = int(val)
+            got_num = len(got)
+            if op == 'mincount':
+                if got_num < expected_num:
+                    yield field, f'expected at least {val} items, got {got_num}'
+                return
+
+            if op == 'maxcount':
+                if got_num > expected_num:
+                    yield field, f'expected at most {val} items, got {got_num}'
+                return
+
+            assert op == 'count'
+            if got_num != expected_num:
+                yield field, f'expected exactly {val} items, got {got_num}'
+            return
+
+        if not isinstance(got, str):
+            yield field, f'expected {str.__name__}, got {type(got).__name__}'
+            return
+
+        if op == 're':
+            if not re.match(val, got):
+                yield field, f'should match {val!r}, got {got!r}'
+            return
+
+        if op == 'startswith':
+            if not got.startswith(val):
+                yield field, f'should start with {val!r}, got {got!r}'
+            return
+
+        if op == 'contains':
+            if not val.startswith(got):
+                yield field, f'should contain {val!r}, got {got!r}'
+            return
+
+        if op == 'md5':
+            hash_val = md5(got)
+            if hash_val != val:
+                yield field, f'expected hash {val}, got {hash_val}'
+            return
+
+        if got != expected:
+            yield field, f'expected {expected!r}, got {got!r}'
+        return
+
+    if isinstance(expected, dict) and isinstance(got, dict):
+        for key, expected_val in expected.items():
+            if key not in got:
+                yield field, f'missing key: {key!r}'
+                continue
+
+            field_name = key if field is None else f'{field}.{key}'
+            yield from _iter_differences(got[key], expected_val, field_name)
+        return
+
+    if isinstance(expected, type):
+        if not isinstance(got, expected):
+            yield field, f'expected {expected.__name__}, got {type(got).__name__}'
+        return
+
+    if isinstance(expected, list) and isinstance(got, list):
+        # TODO: clever diffing algorithm lmao
+        if len(expected) != len(got):
+            yield field, f'expected length of {len(expected)}, got {len(got)}'
+            return
+
+        for index, (got_val, expected_val) in enumerate(zip(got, expected, strict=True)):
+            field_name = str(index) if field is None else f'{field}.{index}'
+            yield from _iter_differences(got_val, expected_val, field_name)
+        return
+
+    if got != expected:
+        yield field, f'expected {expected!r}, got {got!r}'
+
+
+def _expect_value(message, got, expected, field):
+    mismatches = list(_iter_differences(got, expected, field))
+    if not mismatches:
+        return
+
+    fields = [field for field, _ in mismatches if field is not None]
+    return ''.join((
+        message, f' ({", ".join(fields)})' if fields else '',
+        *(f'\n\t{field}: {message}' for field, message in mismatches)))
+
+
+def expect_value(self, got, expected, field):
+    if message := _expect_value('values differ', got, expected, field):
+        self.fail(message)


 def expect_dict(self, got_dict, expected_dict):
-    for info_field, expected in expected_dict.items():
-        got = got_dict.get(info_field)
-        expect_value(self, got, expected, info_field)
+    if message := _expect_value('dictionaries differ', got_dict, expected_dict, None):
+        self.fail(message)


 def sanitize_got_info_dict(got_dict):
@@ -238,6 +259,20 @@ def sanitize(key, value):


 def expect_info_dict(self, got_dict, expected_dict):
+    ALLOWED_KEYS_SORT_ORDER = (
+        # NB: Keep in sync with the docstring of extractor/common.py
+        'id', 'ext', 'direct', 'display_id', 'title', 'alt_title', 'description', 'media_type',
+        'uploader', 'uploader_id', 'uploader_url', 'channel', 'channel_id', 'channel_url', 'channel_is_verified',
+        'channel_follower_count', 'comment_count', 'view_count', 'concurrent_view_count',
+        'like_count', 'dislike_count', 'repost_count', 'average_rating', 'age_limit', 'duration', 'thumbnail', 'heatmap',
+        'chapters', 'chapter', 'chapter_number', 'chapter_id', 'start_time', 'end_time', 'section_start', 'section_end',
+        'categories', 'tags', 'cast', 'composers', 'artists', 'album_artists', 'creators', 'genres',
+        'track', 'track_number', 'track_id', 'album', 'album_type', 'disc_number',
+        'series', 'series_id', 'season', 'season_number', 'season_id', 'episode', 'episode_number', 'episode_id',
+        'timestamp', 'upload_date', 'release_timestamp', 'release_date', 'release_year', 'modified_timestamp', 'modified_date',
+        'playable_in_embed', 'availability', 'live_status', 'location', 'license', '_old_archive_ids',
+    )
+
     expect_dict(self, got_dict, expected_dict)
     # Check for the presence of mandatory fields
     if got_dict.get('_type') not in ('playlist', 'multi_video'):
@@ -253,7 +288,13 @@ def expect_info_dict(self, got_dict, expected_dict):

     test_info_dict = sanitize_got_info_dict(got_dict)

-    missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
+    # Check for invalid/misspelled field names being returned by the extractor
+    invalid_keys = sorted(test_info_dict.keys() - ALLOWED_KEYS_SORT_ORDER)
+    self.assertFalse(invalid_keys, f'Invalid fields returned by the extractor: {", ".join(invalid_keys)}')
+
+    missing_keys = sorted(
+        test_info_dict.keys() - expected_dict.keys(),
+        key=lambda x: ALLOWED_KEYS_SORT_ORDER.index(x))
     if missing_keys:
         def _repr(v):
             if isinstance(v, str):
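Note: the string prefixes recognized by _iter_differences() form a small expectation DSL. A hedged illustration of an expected-dict using the operators the rewrite handles (the field names and values are invented; only the `'md5:...'` digest is real, it is md5('test')):

    expected = {
        'id': str,                                        # bare type: isinstance check
        'title': r're:^Episode \d+$',                     # regex match
        'uploader': 'startswith:PBS',                     # prefix check
        'description': 'md5:098f6bcd4621d373cade4e832627b4f6',  # hash comparison
        'tags': 'mincount:3',                             # length checks: mincount / maxcount / count
    }
    # then, inside a test case: expect_dict(self, got_dict, expected)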
@@ -36,7 +36,6 @@
     "verbose": true,
     "writedescription": false,
     "writeinfojson": true,
-    "writeannotations": false,
     "writelink": false,
     "writeurllink": false,
     "writewebloclink": false,
@@ -36,6 +36,18 @@ def do_GET(self):
             self.send_header('Content-Type', 'text/html; charset=utf-8')
             self.end_headers()
             self.wfile.write(TEAPOT_RESPONSE_BODY.encode())
+        elif self.path == '/fake.m3u8':
+            self.send_response(200)
+            self.send_header('Content-Length', '1024')
+            self.end_headers()
+            self.wfile.write(1024 * b'\x00')
+        elif self.path == '/bipbop.m3u8':
+            with open('test/testdata/m3u8/bipbop_16x9.m3u8', 'rb') as f:
+                data = f.read()
+            self.send_response(200)
+            self.send_header('Content-Length', str(len(data)))
+            self.end_headers()
+            self.wfile.write(data)
         else:
             assert False

@@ -53,6 +65,18 @@ def setUp(self):
     def test_ie_key(self):
         self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)

+    def test_get_netrc_login_info(self):
+        for params in [
+            {'usenetrc': True, 'netrc_location': './test/testdata/netrc/netrc'},
+            {'netrc_cmd': f'{sys.executable} ./test/testdata/netrc/print_netrc.py'},
+        ]:
+            ie = DummyIE(FakeYDL(params))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='normal_use'), ('user', 'pass'))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_user'), ('', 'pass'))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_pass'), ('user', ''))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='both_empty'), ('', ''))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='nonexistent'), (None, None))
+
     def test_html_search_regex(self):
         html = '<p id="foo">Watch this <a href="http://www.youtube.com/watch?v=BaW_jenozKc">video</a></p>'
         search = lambda re, *args: self.ie._html_search_regex(re, html, *args)
@@ -302,6 +326,20 @@ def test_search_json_ld_realworld(self):
             },
             {},
         ),
+        (
+            # test thumbnail_url key without URL scheme
+            r'''
+<script type="application/ld+json">
+{
+  "@context": "https://schema.org",
+  "@type": "VideoObject",
+  "thumbnail_url": "//www.nobelprize.org/images/12693-landscape-medium-gallery.jpg"
+}</script>''',
+            {
+                'thumbnails': [{'url': 'https://www.nobelprize.org/images/12693-landscape-medium-gallery.jpg'}],
+            },
+            {},
+        ),
     ]
     for html, expected_dict, search_json_ld_kwargs in _TESTS:
         expect_dict(
@@ -626,6 +664,7 @@ def test_parse_m3u8_formats(self):
             'img_bipbop_adv_example_fmp4',
             'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
             [{
+                # 60kbps (bitrate not provided in m3u8); sorted as worst because it's grouped with lowest bitrate video track
                 'format_id': 'aud1-English',
                 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a1/prog_index.m3u8',
                 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
@@ -633,15 +672,9 @@ def test_parse_m3u8_formats(self):
                 'ext': 'mp4',
                 'protocol': 'm3u8_native',
                 'audio_ext': 'mp4',
+                'source_preference': 0,
             }, {
-                'format_id': 'aud2-English',
-                'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
-                'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
-                'language': 'en',
-                'ext': 'mp4',
-                'protocol': 'm3u8_native',
-                'audio_ext': 'mp4',
-            }, {
+                # 192kbps (bitrate not provided in m3u8)
                 'format_id': 'aud3-English',
                 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a3/prog_index.m3u8',
                 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
@@ -649,6 +682,17 @@ def test_parse_m3u8_formats(self):
                 'ext': 'mp4',
                 'protocol': 'm3u8_native',
                 'audio_ext': 'mp4',
+                'source_preference': 1,
+            }, {
+                # 384kbps (bitrate not provided in m3u8); sorted as best because it's grouped with the highest bitrate video track
+                'format_id': 'aud2-English',
+                'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
+                'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+                'language': 'en',
+                'ext': 'mp4',
+                'protocol': 'm3u8_native',
+                'audio_ext': 'mp4',
+                'source_preference': 2,
             }, {
                 'format_id': '530',
                 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v2/prog_index.m3u8',
@@ -1901,7 +1945,7 @@ def test_response_with_expected_status_returns_content(self):
         server_thread.daemon = True
         server_thread.start()

-        (content, urlh) = self.ie._download_webpage_handle(
+        content, _ = self.ie._download_webpage_handle(
             f'http://127.0.0.1:{port}/teapot', None,
             expected_status=TEAPOT_RESPONSE_STATUS)
         self.assertEqual(content, TEAPOT_RESPONSE_BODY)
@@ -1915,6 +1959,208 @@ def test_search_nextjs_data(self):
         with self.assertWarns(DeprecationWarning):
             self.assertEqual(self.ie._search_nextjs_data('', None, default='{}'), {})

+    def test_search_nextjs_v13_data(self):
+        HTML = R'''
+            <script>(self.__next_f=self.__next_f||[]).push([0])</script>
+            <script>self.__next_f.push([2,"0:[\"$\",\"$L0\",null,{\"do_not_add_this\":\"fail\"}]\n"])</script>
+            <script>self.__next_f.push([1,"1:I[46975,[],\"HTTPAccessFallbackBoundary\"]\n2:I[32630,[\"8183\",\"static/chunks/8183-768193f6a9e33cdd.js\"]]\n"])</script>
+            <script nonce="abc123">self.__next_f.push([1,"e:[false,[\"$\",\"div\",null,{\"children\":[\"$\",\"$L18\",null,{\"foo\":\"bar\"}]}],false]\n "])</script>
+            <script>self.__next_f.push([1,"2a:[[\"$\",\"div\",null,{\"className\":\"flex flex-col\",\"children\":[]}],[\"$\",\"$L16\",null,{\"meta\":{\"dateCreated\":1730489700,\"uuid\":\"40cac41d-8d29-4ef5-aa11-75047b9f0907\"}}]]\n"])</script>
+            <script>self.__next_f.push([1,"df:[\"$undefined\",[\"$\",\"div\",null,{\"children\":[\"$\",\"$L17\",null,{}],\"do_not_include_this_field\":\"fail\"}],[\"$\",\"div\",null,{\"children\":[[\"$\",\"$L19\",null,{\"duplicated_field_name\":{\"x\":1}}],[\"$\",\"$L20\",null,{\"duplicated_field_name\":{\"y\":2}}]]}],\"$undefined\"]\n"])</script>
+            <script>self.__next_f.push([3,"MzM6WyIkIiwiJEwzMiIsbnVsbCx7ImRlY29kZWQiOiJzdWNjZXNzIn1d"])</script>
+        '''
+        EXPECTED = {
+            '18': {
+                'foo': 'bar',
+            },
+            '16': {
+                'meta': {
+                    'dateCreated': 1730489700,
+                    'uuid': '40cac41d-8d29-4ef5-aa11-75047b9f0907',
+                },
+            },
+            '19': {
+                'duplicated_field_name': {'x': 1},
+            },
+            '20': {
+                'duplicated_field_name': {'y': 2},
+            },
+        }
+        self.assertEqual(self.ie._search_nextjs_v13_data(HTML, None), EXPECTED)
+        self.assertEqual(self.ie._search_nextjs_v13_data('', None, fatal=False), {})
+        self.assertEqual(self.ie._search_nextjs_v13_data(None, None, fatal=False), {})
+
+    def test_search_nuxt_json(self):
+        HTML_TMPL = '<script data-ssr="true" id="__NUXT_DATA__" type="application/json">[{}]</script>'
+        VALID_DATA = '''
+            ["ShallowReactive",1],
+            {"data":2,"state":21,"once":25,"_errors":28,"_server_errors":30},
+            ["ShallowReactive",3],
+            {"$abcdef123456":4},
+            {"podcast":5,"activeEpisodeData":7},
+            {"podcast":6,"seasons":14},
+            {"title":10,"id":11},
+            ["Reactive",8],
+            {"episode":9,"creators":18,"empty_list":20},
+            {"title":12,"id":13,"refs":34,"empty_refs":35},
+            "Series Title",
+            "podcast-id-01",
+            "Episode Title",
+            "episode-id-99",
+            [15,16,17],
+            1,
+            2,
+            3,
+            [19],
+            "Podcast Creator",
+            [],
+            {"$ssite-config":22},
+            {"env":23,"name":24,"map":26,"numbers":14},
+            "production",
+            "podcast-website",
+            ["Set"],
+            ["Reactive",27],
+            ["Map"],
+            ["ShallowReactive",29],
+            {},
+            ["NuxtError",31],
+            {"status":32,"message":33},
+            503,
+            "Service Unavailable",
+            [36,37],
+            [38,39],
+            ["Ref",40],
+            ["ShallowRef",41],
+            ["EmptyRef",42],
+            ["EmptyShallowRef",43],
+            "ref",
+            "shallow_ref",
+            "{\\"ref\\":1}",
+            "{\\"shallow_ref\\":2}"
+        '''
+        PAYLOAD = {
+            'data': {
+                '$abcdef123456': {
+                    'podcast': {
+                        'podcast': {
+                            'title': 'Series Title',
+                            'id': 'podcast-id-01',
+                        },
+                        'seasons': [1, 2, 3],
+                    },
+                    'activeEpisodeData': {
+                        'episode': {
+                            'title': 'Episode Title',
+                            'id': 'episode-id-99',
+                            'refs': ['ref', 'shallow_ref'],
+                            'empty_refs': [{'ref': 1}, {'shallow_ref': 2}],
+                        },
+                        'creators': ['Podcast Creator'],
+                        'empty_list': [],
+                    },
+                },
+            },
+            'state': {
+                '$ssite-config': {
+                    'env': 'production',
+                    'name': 'podcast-website',
+                    'map': [],
+                    'numbers': [1, 2, 3],
+                },
+            },
+            'once': [],
+            '_errors': {},
+            '_server_errors': {
+                'status': 503,
+                'message': 'Service Unavailable',
+            },
+        }
+        PARTIALLY_INVALID = [(
+            '''
+            {"data":1},
+            {"invalid_raw_list":2},
+            [15,16,17]
+            ''',
+            {'data': {'invalid_raw_list': [None, None, None]}},
+        ), (
+            '''
+            {"data":1},
+            ["EmptyRef",2],
+            "not valid JSON"
+            ''',
+            {'data': None},
+        ), (
+            '''
+            {"data":1},
+            ["EmptyShallowRef",2],
+            "not valid JSON"
+            ''',
+            {'data': None},
+        )]
+        INVALID = [
+            '''
+            []
+            ''',
+            '''
+            ["unsupported",1],
+            {"data":2},
+            {}
+            ''',
+        ]
+        DEFAULT = object()
+
+        self.assertEqual(self.ie._search_nuxt_json(HTML_TMPL.format(VALID_DATA), None), PAYLOAD)
+        self.assertEqual(self.ie._search_nuxt_json('', None, fatal=False), {})
+        self.assertIs(self.ie._search_nuxt_json('', None, default=DEFAULT), DEFAULT)
+
+        for data, expected in PARTIALLY_INVALID:
+            self.assertEqual(
+                self.ie._search_nuxt_json(HTML_TMPL.format(data), None, fatal=False), expected)
+
+        for data in INVALID:
+            self.assertIs(
+                self.ie._search_nuxt_json(HTML_TMPL.format(data), None, default=DEFAULT), DEFAULT)
+
+
+class TestInfoExtractorNetwork(unittest.TestCase):
+    def setUp(self, /):
+        self.httpd = http.server.HTTPServer(
+            ('127.0.0.1', 0), InfoExtractorTestRequestHandler)
+        self.port = http_server_port(self.httpd)
+
+        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
+        self.server_thread.daemon = True
+        self.server_thread.start()
+
+        self.called = False
+
+        def require_warning(*args, **kwargs):
+            self.called = True
+
+        self.ydl = FakeYDL()
+        self.ydl.report_warning = require_warning
+        self.ie = DummyIE(self.ydl)
+
+    def tearDown(self, /):
+        self.ydl.close()
+        self.httpd.shutdown()
+        self.httpd.server_close()
+        self.server_thread.join(1)
+
+    def test_extract_m3u8_formats(self):
+        formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
+            f'http://127.0.0.1:{self.port}/bipbop.m3u8', None, fatal=False)
+        self.assertFalse(self.called)
+        self.assertTrue(formats)
+        self.assertTrue(subtitles)
+
+    def test_extract_m3u8_formats_warning(self):
+        formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
+            f'http://127.0.0.1:{self.port}/fake.m3u8', None, fatal=False)
+        self.assertTrue(self.called, 'Warning was not issued for binary m3u8 file')
+        self.assertFalse(formats)
+        self.assertFalse(subtitles)
+
+
 if __name__ == '__main__':
     unittest.main()
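Note: extractors consume _search_nuxt_json() the same way the tests above do. A hedged sketch of the call site inside a hypothetical extractor (the class, URL pattern, and payload shape are invented for illustration; only the helper methods are from yt-dlp):

    from yt_dlp.extractor.common import InfoExtractor


    class ExamplePodcastIE(InfoExtractor):  # hypothetical extractor
        _VALID_URL = r'https?://podcast\.example/(?P<id>[\w-]+)'

        def _real_extract(self, url):
            video_id = self._match_id(url)
            webpage = self._download_webpage(url, video_id)
            nuxt_data = self._search_nuxt_json(webpage, video_id)  # parses the __NUXT_DATA__ blob
            # the shape under 'data' depends entirely on the site's Nuxt payload
            episode = nuxt_data.get('data', {})
            ...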
@@ -6,6 +6,8 @@
 import unittest
 from unittest.mock import patch

+from yt_dlp.globals import all_plugins_loaded
+
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

@@ -15,8 +17,6 @@
 from test.helper import FakeYDL, assertRegexpMatches, try_rm
 from yt_dlp import YoutubeDL
-from yt_dlp.compat import compat_os_name
-from yt_dlp.extractor import YoutubeIE
 from yt_dlp.extractor.common import InfoExtractor
 from yt_dlp.postprocessor.common import PostProcessor
 from yt_dlp.utils import (
@@ -236,6 +236,35 @@ def test_format_selection_video(self):
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')

+    def test_format_selection_by_vcodec_sort(self):
+        formats = [
+            {'format_id': 'av1-format', 'ext': 'mp4', 'vcodec': 'av1', 'acodec': 'none', 'url': TEST_URL},
+            {'format_id': 'vp9-hdr-format', 'ext': 'mp4', 'vcodec': 'vp09.02.50.10.01.09.18.09.00', 'acodec': 'none', 'url': TEST_URL},
+            {'format_id': 'vp9-sdr-format', 'ext': 'mp4', 'vcodec': 'vp09.00.50.08', 'acodec': 'none', 'url': TEST_URL},
+            {'format_id': 'h265-format', 'ext': 'mp4', 'vcodec': 'h265', 'acodec': 'none', 'url': TEST_URL},
+        ]
+        info_dict = _make_result(formats)
+
+        ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9.2']})
+        ydl.process_ie_result(info_dict.copy())
+        downloaded = ydl.downloaded_info_dicts[0]
+        self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')
+
+        ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9']})
+        ydl.process_ie_result(info_dict.copy())
+        downloaded = ydl.downloaded_info_dicts[0]
+        self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')
+
+        ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9.2']})
+        ydl.process_ie_result(info_dict.copy())
+        downloaded = ydl.downloaded_info_dicts[0]
+        self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')
+
+        ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9']})
+        ydl.process_ie_result(info_dict.copy())
+        downloaded = ydl.downloaded_info_dicts[0]
+        self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')
+
     def test_format_selection_string_ops(self):
         formats = [
             {'format_id': 'abc-cba', 'ext': 'mp4', 'url': TEST_URL},
@@ -306,99 +335,6 @@ def test_format_selection_string_ops(self):
         ydl = YDL({'format': '[format_id!*=-]'})
         self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())

-    def test_youtube_format_selection(self):
-        # FIXME: Rewrite in accordance with the new format sorting options
-        return
-
-        order = [
-            '38', '37', '46', '22', '45', '35', '44', '18', '34', '43', '6', '5', '17', '36', '13',
-            # Apple HTTP Live Streaming
-            '96', '95', '94', '93', '92', '132', '151',
-            # 3D
-            '85', '84', '102', '83', '101', '82', '100',
-            # Dash video
-            '137', '248', '136', '247', '135', '246',
-            '245', '244', '134', '243', '133', '242', '160',
|
|
||||||
# Dash audio
|
|
||||||
'141', '172', '140', '171', '139',
|
|
||||||
]
|
|
||||||
|
|
||||||
def format_info(f_id):
|
|
||||||
info = YoutubeIE._formats[f_id].copy()
|
|
||||||
|
|
||||||
# XXX: In real cases InfoExtractor._parse_mpd_formats() fills up 'acodec'
|
|
||||||
# and 'vcodec', while in tests such information is incomplete since
|
|
||||||
# commit a6c2c24479e5f4827ceb06f64d855329c0a6f593
|
|
||||||
# test_YoutubeDL.test_youtube_format_selection is broken without
|
|
||||||
# this fix
|
|
||||||
if 'acodec' in info and 'vcodec' not in info:
|
|
||||||
info['vcodec'] = 'none'
|
|
||||||
elif 'vcodec' in info and 'acodec' not in info:
|
|
||||||
info['acodec'] = 'none'
|
|
||||||
|
|
||||||
info['format_id'] = f_id
|
|
||||||
info['url'] = 'url:' + f_id
|
|
||||||
return info
|
|
||||||
formats_order = [format_info(f_id) for f_id in order]
|
|
||||||
|
|
||||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
|
||||||
ydl = YDL({'format': 'bestvideo+bestaudio'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded = ydl.downloaded_info_dicts[0]
|
|
||||||
self.assertEqual(downloaded['format_id'], '248+172')
|
|
||||||
self.assertEqual(downloaded['ext'], 'mp4')
|
|
||||||
|
|
||||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
|
||||||
ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded = ydl.downloaded_info_dicts[0]
|
|
||||||
self.assertEqual(downloaded['format_id'], '38')
|
|
||||||
|
|
||||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
|
||||||
ydl = YDL({'format': 'bestvideo/best,bestaudio'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
|
||||||
self.assertEqual(downloaded_ids, ['137', '141'])
|
|
||||||
|
|
||||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
|
||||||
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
|
||||||
self.assertEqual(downloaded_ids, ['137+141', '248+141'])
|
|
||||||
|
|
||||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
|
||||||
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
|
||||||
self.assertEqual(downloaded_ids, ['136+141', '247+141'])
|
|
||||||
|
|
||||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
|
||||||
ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
|
||||||
self.assertEqual(downloaded_ids, ['248+141'])
|
|
||||||
|
|
||||||
for f1, f2 in zip(formats_order, formats_order[1:]):
|
|
||||||
info_dict = _make_result([f1, f2], extractor='youtube')
|
|
||||||
ydl = YDL({'format': 'best/bestvideo'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded = ydl.downloaded_info_dicts[0]
|
|
||||||
self.assertEqual(downloaded['format_id'], f1['format_id'])
|
|
||||||
|
|
||||||
info_dict = _make_result([f2, f1], extractor='youtube')
|
|
||||||
ydl = YDL({'format': 'best/bestvideo'})
|
|
||||||
ydl.sort_formats(info_dict)
|
|
||||||
ydl.process_ie_result(info_dict)
|
|
||||||
downloaded = ydl.downloaded_info_dicts[0]
|
|
||||||
self.assertEqual(downloaded['format_id'], f1['format_id'])
|
|
||||||
|
|
||||||
def test_audio_only_extractor_format_selection(self):
|
def test_audio_only_extractor_format_selection(self):
|
||||||
# For extractors with incomplete formats (all formats are audio-only or
|
# For extractors with incomplete formats (all formats are audio-only or
|
||||||
# video-only) best and worst should fallback to corresponding best/worst
|
# video-only) best and worst should fallback to corresponding best/worst
|
||||||
@@ -458,11 +394,11 @@ def assert_syntax_error(format_spec):
|
|||||||
|
|
||||||
def test_format_filtering(self):
|
def test_format_filtering(self):
|
||||||
formats = [
|
formats = [
|
||||||
{'format_id': 'A', 'filesize': 500, 'width': 1000},
|
{'format_id': 'A', 'filesize': 500, 'width': 1000, 'aspect_ratio': 1.0},
|
||||||
{'format_id': 'B', 'filesize': 1000, 'width': 500},
|
{'format_id': 'B', 'filesize': 1000, 'width': 500, 'aspect_ratio': 1.33},
|
||||||
{'format_id': 'C', 'filesize': 1000, 'width': 400},
|
{'format_id': 'C', 'filesize': 1000, 'width': 400, 'aspect_ratio': 1.5},
|
||||||
{'format_id': 'D', 'filesize': 2000, 'width': 600},
|
{'format_id': 'D', 'filesize': 2000, 'width': 600, 'aspect_ratio': 1.78},
|
||||||
{'format_id': 'E', 'filesize': 3000},
|
{'format_id': 'E', 'filesize': 3000, 'aspect_ratio': 0.56},
|
||||||
{'format_id': 'F'},
|
{'format_id': 'F'},
|
||||||
{'format_id': 'G', 'filesize': 1000000},
|
{'format_id': 'G', 'filesize': 1000000},
|
||||||
]
|
]
|
||||||
@@ -521,6 +457,31 @@ def test_format_filtering(self):
|
|||||||
ydl.process_ie_result(info_dict)
|
ydl.process_ie_result(info_dict)
|
||||||
self.assertEqual(ydl.downloaded_info_dicts, [])
|
self.assertEqual(ydl.downloaded_info_dicts, [])
|
||||||
|
|
||||||
|
ydl = YDL({'format': 'best[aspect_ratio=1]'})
|
||||||
|
ydl.process_ie_result(info_dict)
|
||||||
|
downloaded = ydl.downloaded_info_dicts[0]
|
||||||
|
self.assertEqual(downloaded['format_id'], 'A')
|
||||||
|
|
||||||
|
ydl = YDL({'format': 'all[aspect_ratio > 1.00]'})
|
||||||
|
ydl.process_ie_result(info_dict)
|
||||||
|
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||||
|
self.assertEqual(downloaded_ids, ['D', 'C', 'B'])
|
||||||
|
|
||||||
|
ydl = YDL({'format': 'all[aspect_ratio < 1.00]'})
|
||||||
|
ydl.process_ie_result(info_dict)
|
||||||
|
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||||
|
self.assertEqual(downloaded_ids, ['E'])
|
||||||
|
|
||||||
|
ydl = YDL({'format': 'best[aspect_ratio=1.5]'})
|
||||||
|
ydl.process_ie_result(info_dict)
|
||||||
|
downloaded = ydl.downloaded_info_dicts[0]
|
||||||
|
self.assertEqual(downloaded['format_id'], 'C')
|
||||||
|
|
||||||
|
ydl = YDL({'format': 'all[aspect_ratio!=1]'})
|
||||||
|
ydl.process_ie_result(info_dict)
|
||||||
|
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||||
|
self.assertEqual(downloaded_ids, ['E', 'D', 'C', 'B'])
|
||||||
|
|
||||||
@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', False)
|
@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', False)
|
||||||
def test_default_format_spec_without_ffmpeg(self):
|
def test_default_format_spec_without_ffmpeg(self):
|
||||||
ydl = YDL({})
|
ydl = YDL({})
|
||||||
@@ -694,7 +655,7 @@ def test(tmpl, expected, *, info=None, **params):
|
|||||||
|
|
||||||
if not isinstance(expected, (list, tuple)):
|
if not isinstance(expected, (list, tuple)):
|
||||||
expected = (expected, expected)
|
expected = (expected, expected)
|
||||||
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
|
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected, strict=True):
|
||||||
if callable(expect):
|
if callable(expect):
|
||||||
self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
|
self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
|
||||||
elif expect is not None:
|
elif expect is not None:
|
||||||
@@ -733,6 +694,13 @@ def test(tmpl, expected, *, info=None, **params):
|
|||||||
test('%(width)06d.%%(ext)s', 'NA.%(ext)s')
|
test('%(width)06d.%%(ext)s', 'NA.%(ext)s')
|
||||||
test('%%(width)06d.%(ext)s', '%(width)06d.mp4')
|
test('%%(width)06d.%(ext)s', '%(width)06d.mp4')
|
||||||
|
|
||||||
|
# Sanitization options
|
||||||
|
test('%(title3)s', (None, 'foo⧸bar⧹test'))
|
||||||
|
test('%(title5)s', (None, 'aei_A'), restrictfilenames=True)
|
||||||
|
test('%(title3)s', (None, 'foo_bar_test'), windowsfilenames=False, restrictfilenames=True)
|
||||||
|
if sys.platform != 'win32':
|
||||||
|
test('%(title3)s', (None, 'foo⧸bar\\test'), windowsfilenames=False)
|
||||||
|
|
||||||
# ID sanitization
|
# ID sanitization
|
||||||
test('%(id)s', '_abcd', info={'id': '_abcd'})
|
test('%(id)s', '_abcd', info={'id': '_abcd'})
|
||||||
test('%(some_id)s', '_abcd', info={'some_id': '_abcd'})
|
test('%(some_id)s', '_abcd', info={'some_id': '_abcd'})
|
||||||
@@ -810,8 +778,8 @@ def expect_same_infodict(out):
|
|||||||
test('%(filesize)#D', '1Ki')
|
test('%(filesize)#D', '1Ki')
|
||||||
test('%(height)5.2D', ' 1.08k')
|
test('%(height)5.2D', ' 1.08k')
|
||||||
test('%(title4)#S', 'foo_bar_test')
|
test('%(title4)#S', 'foo_bar_test')
|
||||||
test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if compat_os_name == 'nt' else ' ')))
|
test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if os.name == 'nt' else ' ')))
|
||||||
if compat_os_name == 'nt':
|
if os.name == 'nt':
|
||||||
test('%(title4)q', ('"foo ""bar"" test"', None))
|
test('%(title4)q', ('"foo ""bar"" test"', None))
|
||||||
test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', None))
|
test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', None))
|
||||||
test('%(formats.0.id)#q', ('"id 1"', None))
|
test('%(formats.0.id)#q', ('"id 1"', None))
|
||||||
@@ -874,9 +842,9 @@ def gen():
|
|||||||
|
|
||||||
# Environment variable expansion for prepare_filename
|
# Environment variable expansion for prepare_filename
|
||||||
os.environ['__yt_dlp_var'] = 'expanded'
|
os.environ['__yt_dlp_var'] = 'expanded'
|
||||||
envvar = '%__yt_dlp_var%' if compat_os_name == 'nt' else '$__yt_dlp_var'
|
envvar = '%__yt_dlp_var%' if os.name == 'nt' else '$__yt_dlp_var'
|
||||||
test(envvar, (envvar, 'expanded'))
|
test(envvar, (envvar, 'expanded'))
|
||||||
if compat_os_name == 'nt':
|
if os.name == 'nt':
|
||||||
test('%s%', ('%s%', '%s%'))
|
test('%s%', ('%s%', '%s%'))
|
||||||
os.environ['s'] = 'expanded'
|
os.environ['s'] = 'expanded'
|
||||||
test('%s%', ('%s%', 'expanded')) # %s% should be expanded before escaping %s
|
test('%s%', ('%s%', 'expanded')) # %s% should be expanded before escaping %s
|
||||||
@@ -1085,7 +1053,7 @@ def test_selection(params, expected_ids, evaluate_all=False):
|
|||||||
entries = func(evaluated)
|
entries = func(evaluated)
|
||||||
results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
|
results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
|
||||||
for v in get_downloaded_info_dicts(params, entries)]
|
for v in get_downloaded_info_dicts(params, entries)]
|
||||||
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
|
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids, strict=True))), f'Entries of {name} for {params}')
|
||||||
self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
|
self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
|
||||||
|
|
||||||
test_selection({}, INDICES)
|
test_selection({}, INDICES)
|
||||||
@@ -1367,6 +1335,33 @@ def check_for_cookie_header(result):
|
|||||||
self.assertFalse(result.get('cookies'), msg='Cookies set in cookies field for wrong domain')
|
self.assertFalse(result.get('cookies'), msg='Cookies set in cookies field for wrong domain')
|
||||||
self.assertFalse(ydl.cookiejar.get_cookie_header(fmt['url']), msg='Cookies set in cookiejar for wrong domain')
|
self.assertFalse(ydl.cookiejar.get_cookie_header(fmt['url']), msg='Cookies set in cookiejar for wrong domain')
|
||||||
|
|
||||||
|
def test_load_plugins_compat(self):
|
||||||
|
# Should try to reload plugins if they haven't already been loaded
|
||||||
|
all_plugins_loaded.value = False
|
||||||
|
FakeYDL().close()
|
||||||
|
assert all_plugins_loaded.value
|
||||||
|
|
||||||
|
def test_close_hooks(self):
|
||||||
|
# Should call all registered close hooks on close
|
||||||
|
close_hook_called = False
|
||||||
|
close_hook_two_called = False
|
||||||
|
|
||||||
|
def close_hook():
|
||||||
|
nonlocal close_hook_called
|
||||||
|
close_hook_called = True
|
||||||
|
|
||||||
|
def close_hook_two():
|
||||||
|
nonlocal close_hook_two_called
|
||||||
|
close_hook_two_called = True
|
||||||
|
|
||||||
|
ydl = FakeYDL()
|
||||||
|
ydl.add_close_hook(close_hook)
|
||||||
|
ydl.add_close_hook(close_hook_two)
|
||||||
|
|
||||||
|
ydl.close()
|
||||||
|
self.assertTrue(close_hook_called, 'Close hook was not called')
|
||||||
|
self.assertTrue(close_hook_two_called, 'Close hook two was not called')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
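Note: the vcodec sort tests above exercise the same `format_sort` parameter that backs the user-facing `-S`/`--format-sort` option. An equivalent embedding sketch, with the URL as a placeholder:

    from yt_dlp import YoutubeDL

    # Prefer HDR VP9 (vp9.2) over SDR VP9 and other codecs, as asserted above
    with YoutubeDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9.2']}) as ydl:
        info = ydl.extract_info('https://example.com/watch?v=...', download=False)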
test/test_aes.py

@@ -27,7 +27,6 @@
     pad_block,
 )
 from yt_dlp.dependencies import Cryptodome
-from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
 
 # the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'
 
@@ -40,33 +39,33 @@ def setUp(self):
     def test_encrypt(self):
         msg = b'message'
         key = list(range(16))
-        encrypted = aes_encrypt(bytes_to_intlist(msg), key)
-        decrypted = intlist_to_bytes(aes_decrypt(encrypted, key))
+        encrypted = aes_encrypt(list(msg), key)
+        decrypted = bytes(aes_decrypt(encrypted, key))
         self.assertEqual(decrypted, msg)
 
     def test_cbc_decrypt(self):
         data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
-        decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
+        decrypted = bytes(aes_cbc_decrypt(list(data), self.key, self.iv))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
         if Cryptodome.AES:
-            decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
+            decrypted = aes_cbc_decrypt_bytes(data, bytes(self.key), bytes(self.iv))
             self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
     def test_cbc_encrypt(self):
-        data = bytes_to_intlist(self.secret_msg)
-        encrypted = intlist_to_bytes(aes_cbc_encrypt(data, self.key, self.iv))
+        data = list(self.secret_msg)
+        encrypted = bytes(aes_cbc_encrypt(data, self.key, self.iv))
         self.assertEqual(
             encrypted,
             b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd')
 
     def test_ctr_decrypt(self):
-        data = bytes_to_intlist(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
-        decrypted = intlist_to_bytes(aes_ctr_decrypt(data, self.key, self.iv))
+        data = list(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
+        decrypted = bytes(aes_ctr_decrypt(data, self.key, self.iv))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
     def test_ctr_encrypt(self):
-        data = bytes_to_intlist(self.secret_msg)
-        encrypted = intlist_to_bytes(aes_ctr_encrypt(data, self.key, self.iv))
+        data = list(self.secret_msg)
+        encrypted = bytes(aes_ctr_encrypt(data, self.key, self.iv))
         self.assertEqual(
             encrypted,
             b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
@@ -75,47 +74,59 @@ def test_gcm_decrypt(self):
         data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f.\x08\xb4T\xe4/\x17\xbd'
         authentication_tag = b'\xe8&I\x80rI\x07\x9d}YWuU@:e'
 
-        decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
-            bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
+        decrypted = bytes(aes_gcm_decrypt_and_verify(
+            list(data), self.key, list(authentication_tag), self.iv[:12]))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
         if Cryptodome.AES:
             decrypted = aes_gcm_decrypt_and_verify_bytes(
-                data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
+                data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
             self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
+    def test_gcm_aligned_decrypt(self):
+        data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f'
+        authentication_tag = b'\x08\xb1\x9d!&\x98\xd0\xeaRq\x90\xe6;\xb5]\xd8'
+
+        decrypted = bytes(aes_gcm_decrypt_and_verify(
+            list(data), self.key, list(authentication_tag), self.iv[:12]))
+        self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])
+        if Cryptodome.AES:
+            decrypted = aes_gcm_decrypt_and_verify_bytes(
+                data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
+            self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])
+
     def test_decrypt_text(self):
-        password = intlist_to_bytes(self.key).decode()
+        password = bytes(self.key).decode()
         encrypted = base64.b64encode(
-            intlist_to_bytes(self.iv[:8])
+            bytes(self.iv[:8])
             + b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae',
         ).decode()
         decrypted = (aes_decrypt_text(encrypted, password, 16))
         self.assertEqual(decrypted, self.secret_msg)
 
-        password = intlist_to_bytes(self.key).decode()
+        password = bytes(self.key).decode()
         encrypted = base64.b64encode(
-            intlist_to_bytes(self.iv[:8])
+            bytes(self.iv[:8])
             + b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83',
         ).decode()
         decrypted = (aes_decrypt_text(encrypted, password, 32))
         self.assertEqual(decrypted, self.secret_msg)
 
     def test_ecb_encrypt(self):
-        data = bytes_to_intlist(self.secret_msg)
-        encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
+        data = list(self.secret_msg)
+        encrypted = bytes(aes_ecb_encrypt(data, self.key))
         self.assertEqual(
             encrypted,
             b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
 
     def test_ecb_decrypt(self):
-        data = bytes_to_intlist(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
-        decrypted = intlist_to_bytes(aes_ecb_decrypt(data, self.key, self.iv))
+        data = list(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
+        decrypted = bytes(aes_ecb_decrypt(data, self.key, self.iv))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
     def test_key_expansion(self):
         key = '4f6bdaa39e2f8cb07f5e722d9edef314'
 
-        self.assertEqual(key_expansion(bytes_to_intlist(bytearray.fromhex(key))), [
+        self.assertEqual(key_expansion(list(bytearray.fromhex(key))), [
             0x4F, 0x6B, 0xDA, 0xA3, 0x9E, 0x2F, 0x8C, 0xB0, 0x7F, 0x5E, 0x72, 0x2D, 0x9E, 0xDE, 0xF3, 0x14,
             0x53, 0x66, 0x20, 0xA8, 0xCD, 0x49, 0xAC, 0x18, 0xB2, 0x17, 0xDE, 0x35, 0x2C, 0xC9, 0x2D, 0x21,
             0x8C, 0xBE, 0xDD, 0xD9, 0x41, 0xF7, 0x71, 0xC1, 0xF3, 0xE0, 0xAF, 0xF4, 0xDF, 0x29, 0x82, 0xD5,
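Note: the churn above is mechanical. `bytes_to_intlist`/`intlist_to_bytes` were thin wrappers around conversions Python provides natively, so the helpers could be dropped:

    >>> list(b'abc')          # replaces bytes_to_intlist(b'abc')
    [97, 98, 99]
    >>> bytes([97, 98, 99])   # replaces intlist_to_bytes([97, 98, 99])
    b'abc'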
test/test_compat.py

@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 # Allow direct execution
+import datetime as dt
 import os
 import sys
 import unittest
@@ -12,12 +13,7 @@
 
 from yt_dlp import compat
 from yt_dlp.compat import urllib  # isort: split
-from yt_dlp.compat import (
-    compat_etree_fromstring,
-    compat_expanduser,
-    compat_urllib_parse_unquote,  # noqa: TID251
-    compat_urllib_parse_urlencode,  # noqa: TID251
-)
+from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
 from yt_dlp.compat.urllib.request import getproxies
 
 
@@ -26,9 +22,6 @@ def test_compat_passthrough(self):
         with self.assertWarns(DeprecationWarning):
             _ = compat.compat_basestring
 
-        with self.assertWarns(DeprecationWarning):
-            _ = compat.WINDOWS_VT_MODE
-
         self.assertEqual(urllib.request.getproxies, getproxies)
 
         with self.assertWarns(DeprecationWarning):
@@ -43,39 +36,6 @@ def test_compat_expanduser(self):
         finally:
             os.environ['HOME'] = old_home or ''
 
-    def test_compat_urllib_parse_unquote(self):
-        self.assertEqual(compat_urllib_parse_unquote('abc%20def'), 'abc def')
-        self.assertEqual(compat_urllib_parse_unquote('%7e/abc+def'), '~/abc+def')
-        self.assertEqual(compat_urllib_parse_unquote(''), '')
-        self.assertEqual(compat_urllib_parse_unquote('%'), '%')
-        self.assertEqual(compat_urllib_parse_unquote('%%'), '%%')
-        self.assertEqual(compat_urllib_parse_unquote('%%%'), '%%%')
-        self.assertEqual(compat_urllib_parse_unquote('%2F'), '/')
-        self.assertEqual(compat_urllib_parse_unquote('%2f'), '/')
-        self.assertEqual(compat_urllib_parse_unquote('%E6%B4%A5%E6%B3%A2'), '津波')
-        self.assertEqual(
-            compat_urllib_parse_unquote('''<meta property="og:description" content="%E2%96%81%E2%96%82%E2%96%83%E2%96%84%25%E2%96%85%E2%96%86%E2%96%87%E2%96%88" />
-%<a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B3%D9%88%D9%86%D8%A7%D9%85%D9%8A">%a'''),
-            '''<meta property="og:description" content="▁▂▃▄%▅▆▇█" />
-%<a href="https://ar.wikipedia.org/wiki/تسونامي">%a''')
-        self.assertEqual(
-            compat_urllib_parse_unquote('''%28%5E%E2%97%A3_%E2%97%A2%5E%29%E3%81%A3%EF%B8%BB%E3%83%87%E2%95%90%E4%B8%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%86%B6%I%Break%25Things%'''),
-            '''(^◣_◢^)っ︻デ═一 ⇀ ⇀ ⇀ ⇀ ⇀ ↶%I%Break%Things%''')
-
-    def test_compat_urllib_parse_unquote_plus(self):
-        self.assertEqual(urllib.parse.unquote_plus('abc%20def'), 'abc def')
-        self.assertEqual(urllib.parse.unquote_plus('%7e/abc+def'), '~/abc def')
-
-    def test_compat_urllib_parse_urlencode(self):
-        self.assertEqual(compat_urllib_parse_urlencode({'abc': 'def'}), 'abc=def')
-        self.assertEqual(compat_urllib_parse_urlencode({'abc': b'def'}), 'abc=def')
-        self.assertEqual(compat_urllib_parse_urlencode({b'abc': 'def'}), 'abc=def')
-        self.assertEqual(compat_urllib_parse_urlencode({b'abc': b'def'}), 'abc=def')
-        self.assertEqual(compat_urllib_parse_urlencode([('abc', 'def')]), 'abc=def')
-        self.assertEqual(compat_urllib_parse_urlencode([('abc', b'def')]), 'abc=def')
-        self.assertEqual(compat_urllib_parse_urlencode([(b'abc', 'def')]), 'abc=def')
-        self.assertEqual(compat_urllib_parse_urlencode([(b'abc', b'def')]), 'abc=def')
-
     def test_compat_etree_fromstring(self):
         xml = '''
             <root foo="bar" spam="中文">
@@ -100,6 +60,45 @@ def test_compat_etree_fromstring_doctype(self):
     def test_struct_unpack(self):
         self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
 
+    def test_compat_datetime_from_timestamp(self):
+        self.assertEqual(
+            compat_datetime_from_timestamp(0),
+            dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(1),
+            dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(3600),
+            dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(-1),
+            dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(-86400),
+            dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(0.5),
+            dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(1.000001),
+            dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(-1.25),
+            dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(-1577923200),
+            dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(4102444800),
+            dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(173568960000),
+            dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))
+
 
 if __name__ == '__main__':
     unittest.main()
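Note: judging by the expected values above, `compat_datetime_from_timestamp(ts)` behaves like the stdlib conversion sketched below, extended to timestamps outside the platform's native range (e.g. 173568960000 → year 7470, which can overflow `datetime.fromtimestamp` on some systems). This is a reference for the expected behaviour, not the actual shim:

    import datetime as dt

    def datetime_from_timestamp(ts):
        # epoch plus an exact timedelta avoids platform localtime limits
        return dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc) + dt.timedelta(seconds=ts)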
test/test_cookies.py

@@ -58,6 +58,14 @@ def test_get_desktop_environment(self):
             ({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
             ({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
 
+            ({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME),
+            ({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME),
+            ({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
+            ({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
+            ({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
+
+            ({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'my_custom_de', 'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
+
             ({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
             ({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
             ({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
@@ -105,6 +113,13 @@ def test_chrome_cookie_decryptor_linux_v11(self):
             decryptor = LinuxChromeCookieDecryptor('Chrome', Logger())
             self.assertEqual(decryptor.decrypt(encrypted_value), value)
 
+    def test_chrome_cookie_decryptor_linux_v10_meta24(self):
+        with MonkeyPatch(cookies, {'_get_linux_keyring_password': lambda *args, **kwargs: b''}):
+            encrypted_value = b'v10\x1f\xe4\x0e[\x83\x0c\xcc*kPi \xce\x8d\x1d\xbb\x80\r\x11\t\xbb\x9e^Hy\x94\xf4\x963\x9f\x82\xba\xfe\xa1\xed\xb9\xf1)\x00710\x92\xc8/<\x96B'
+            value = 'DE'
+            decryptor = LinuxChromeCookieDecryptor('Chrome', Logger(), meta_version=24)
+            self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
     def test_chrome_cookie_decryptor_windows_v10(self):
         with MonkeyPatch(cookies, {
             '_get_windows_v10_key': lambda *args, **kwargs: b'Y\xef\xad\xad\xeerp\xf0Y\xe6\x9b\x12\xc2<z\x16]\n\xbb\xb8\xcb\xd7\x9bA\xc3\x14e\x99{\xd6\xf4&',
@@ -114,6 +129,15 @@ def test_chrome_cookie_decryptor_windows_v10(self):
             decryptor = WindowsChromeCookieDecryptor('', Logger())
             self.assertEqual(decryptor.decrypt(encrypted_value), value)
 
+    def test_chrome_cookie_decryptor_windows_v10_meta24(self):
+        with MonkeyPatch(cookies, {
+            '_get_windows_v10_key': lambda *args, **kwargs: b'\xea\x8b\x02\xc3\xc6\xc5\x99\xc3\xa3[ j\xfa\xf6\xfcU\xac\x13u\xdc\x0c\x0e\xf1\x03\x90\xb6\xdf\xbb\x8fL\xb1\xb2',
+        }):
+            encrypted_value = b'v10dN\xe1\xacy\x84^\xe1I\xact\x03r\xfb\xe2\xce{^\x0e<(\xb0y\xeb\x01\xfb@"\x9e\x8c\xa53~\xdb*\x8f\xac\x8b\xe3\xfd3\x06\xe5\x93\x19OyOG\xb2\xfb\x1d$\xc0\xda\x13j\x9e\xfe\xc5\xa3\xa8\xfe\xd9'
+            value = '1234'
+            decryptor = WindowsChromeCookieDecryptor('', Logger(), meta_version=24)
+            self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
     def test_chrome_cookie_decryptor_mac_v10(self):
         with MonkeyPatch(cookies, {'_get_mac_keyring_password': lambda *args, **kwargs: b'6eIDUdtKAacvlHwBVwvg/Q=='}):
             encrypted_value = b'v10\xb3\xbe\xad\xa1[\x9fC\xa1\x98\xe0\x9a\x01\xd9\xcf\xbfc'
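Note: the new `meta_version=24` cases cover Chromium cookie databases whose meta table is at version 24 or later, where (as these fixtures suggest, matching Chromium's behaviour of prepending a hash of the host key) the decrypted plaintext carries a 32-byte prefix that must be stripped before the cookie value. A rough sketch of the difference; `decrypt_payload` is a hypothetical stand-in for the AES step:

    plaintext = decrypt_payload(encrypted_value[len(b'v10'):])  # strip the version prefix first
    value = plaintext[32:] if meta_version >= 24 else plaintext  # drop the 32-byte hash prefix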
test/test_devalue.py (new file, 235 lines)

@@ -0,0 +1,235 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import datetime as dt
+import json
+import math
+import re
+import unittest
+
+from yt_dlp.utils.jslib import devalue
+
+
+TEST_CASES_EQUALS = [{
+    'name': 'int',
+    'unparsed': [-42],
+    'parsed': -42,
+}, {
+    'name': 'str',
+    'unparsed': ['woo!!!'],
+    'parsed': 'woo!!!',
+}, {
+    'name': 'Number',
+    'unparsed': [['Object', 42]],
+    'parsed': 42,
+}, {
+    'name': 'String',
+    'unparsed': [['Object', 'yar']],
+    'parsed': 'yar',
+}, {
+    'name': 'Infinity',
+    'unparsed': -4,
+    'parsed': math.inf,
+}, {
+    'name': 'negative Infinity',
+    'unparsed': -5,
+    'parsed': -math.inf,
+}, {
+    'name': 'negative zero',
+    'unparsed': -6,
+    'parsed': -0.0,
+}, {
+    'name': 'RegExp',
+    'unparsed': [['RegExp', 'regexp', 'gim']],  # XXX: flags are ignored
+    'parsed': re.compile('regexp'),
+}, {
+    'name': 'Date',
+    'unparsed': [['Date', '2001-09-09T01:46:40.000Z']],
+    'parsed': dt.datetime.fromtimestamp(1e9, tz=dt.timezone.utc),
+}, {
+    'name': 'Array',
+    'unparsed': [[1, 2, 3], 'a', 'b', 'c'],
+    'parsed': ['a', 'b', 'c'],
+}, {
+    'name': 'Array (empty)',
+    'unparsed': [[]],
+    'parsed': [],
+}, {
+    'name': 'Array (sparse)',
+    'unparsed': [[-2, 1, -2], 'b'],
+    'parsed': [None, 'b', None],
+}, {
+    'name': 'Object',
+    'unparsed': [{'foo': 1, 'x-y': 2}, 'bar', 'z'],
+    'parsed': {'foo': 'bar', 'x-y': 'z'},
+}, {
+    'name': 'Set',
+    'unparsed': [['Set', 1, 2, 3], 1, 2, 3],
+    'parsed': [1, 2, 3],
+}, {
+    'name': 'Map',
+    'unparsed': [['Map', 1, 2], 'a', 'b'],
+    'parsed': [['a', 'b']],
+}, {
+    'name': 'BigInt',
+    'unparsed': [['BigInt', '1']],
+    'parsed': 1,
+}, {
+    'name': 'Uint8Array',
+    'unparsed': [['Uint8Array', 'AQID']],
+    'parsed': [1, 2, 3],
+}, {
+    'name': 'ArrayBuffer',
+    'unparsed': [['ArrayBuffer', 'AQID']],
+    'parsed': [1, 2, 3],
+}, {
+    'name': 'str (repetition)',
+    'unparsed': [[1, 1], 'a string'],
+    'parsed': ['a string', 'a string'],
+}, {
+    'name': 'None (repetition)',
+    'unparsed': [[1, 1], None],
+    'parsed': [None, None],
+}, {
+    'name': 'dict (repetition)',
+    'unparsed': [[1, 1], {}],
+    'parsed': [{}, {}],
+}, {
+    'name': 'Object without prototype',
+    'unparsed': [['null']],
+    'parsed': {},
+}, {
+    'name': 'cross-realm POJO',
+    'unparsed': [{}],
+    'parsed': {},
+}]
+
+TEST_CASES_IS = [{
+    'name': 'bool',
+    'unparsed': [True],
+    'parsed': True,
+}, {
+    'name': 'Boolean',
+    'unparsed': [['Object', False]],
+    'parsed': False,
+}, {
+    'name': 'undefined',
+    'unparsed': -1,
+    'parsed': None,
+}, {
+    'name': 'null',
+    'unparsed': [None],
+    'parsed': None,
+}, {
+    'name': 'NaN',
+    'unparsed': -3,
+    'parsed': math.nan,
+}]
+
+TEST_CASES_INVALID = [{
+    'name': 'empty string',
+    'unparsed': '',
+    'error': ValueError,
+    'pattern': r'expected int or list as input',
+}, {
+    'name': 'hole',
+    'unparsed': -2,
+    'error': ValueError,
+    'pattern': r'invalid integer input',
+}, {
+    'name': 'string',
+    'unparsed': 'hello',
+    'error': ValueError,
+    'pattern': r'expected int or list as input',
+}, {
+    'name': 'number',
+    'unparsed': 42,
+    'error': ValueError,
+    'pattern': r'invalid integer input',
+}, {
+    'name': 'boolean',
+    'unparsed': True,
+    'error': ValueError,
+    'pattern': r'expected int or list as input',
+}, {
+    'name': 'null',
+    'unparsed': None,
+    'error': ValueError,
+    'pattern': r'expected int or list as input',
+}, {
+    'name': 'object',
+    'unparsed': {},
+    'error': ValueError,
+    'pattern': r'expected int or list as input',
+}, {
+    'name': 'empty array',
+    'unparsed': [],
+    'error': ValueError,
+    'pattern': r'expected a non-empty list as input',
+}, {
+    'name': 'Python negative indexing',
+    'unparsed': [[1, 2, 3, 4, 5, 6, 7, -7], 1, 2, 3, 4, 5, 6, 7],
+    'error': IndexError,
+    'pattern': r'invalid index: -7',
+}]
+
+
+class TestDevalue(unittest.TestCase):
+    def test_devalue_parse_equals(self):
+        for tc in TEST_CASES_EQUALS:
+            self.assertEqual(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
+
+    def test_devalue_parse_is(self):
+        for tc in TEST_CASES_IS:
+            self.assertIs(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
+
+    def test_devalue_parse_invalid(self):
+        for tc in TEST_CASES_INVALID:
+            with self.assertRaisesRegex(tc['error'], tc['pattern'], msg=tc['name']):
+                devalue.parse(tc['unparsed'])
+
+    def test_devalue_parse_cyclical(self):
+        name = 'Map (cyclical)'
+        result = devalue.parse([['Map', 1, 0], 'self'])
+        self.assertEqual(result[0][0], 'self', name)
+        self.assertIs(result, result[0][1], name)
+
+        name = 'Set (cyclical)'
+        result = devalue.parse([['Set', 0, 1], 42])
+        self.assertEqual(result[1], 42, name)
+        self.assertIs(result, result[0], name)
+
+        result = devalue.parse([[0]])
+        self.assertIs(result, result[0], 'Array (cyclical)')
+
+        name = 'Object (cyclical)'
+        result = devalue.parse([{'self': 0}])
+        self.assertIs(result, result['self'], name)
+
+        name = 'Object with null prototype (cyclical)'
+        result = devalue.parse([['null', 'self', 0]])
+        self.assertIs(result, result['self'], name)
+
+        name = 'Objects (cyclical)'
+        result = devalue.parse([[1, 2], {'second': 2}, {'first': 1}])
+        self.assertIs(result[0], result[1]['first'], name)
+        self.assertIs(result[1], result[0]['second'], name)
+
+    def test_devalue_parse_revivers(self):
+        self.assertEqual(
+            devalue.parse([['indirect', 1], {'a': 2}, 'b'], revivers={'indirect': lambda x: x}),
+            {'a': 'b'}, 'revivers (indirect)')
+
+        self.assertEqual(
+            devalue.parse([['parse', 1], '{"a":0}'], revivers={'parse': lambda x: json.loads(x)}),
+            {'a': 0}, 'revivers (parse)')
+
+
+if __name__ == '__main__':
+    unittest.main()
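Note: `devalue.parse` decodes the flat, index-referenced format produced by devalue-style serializers (used e.g. by SvelteKit), where values live in one flat list and containers hold indices into it. A quick usage example built directly from the cases above:

    import json

    from yt_dlp.utils.jslib import devalue

    # Containers reference values by index into the flat list
    devalue.parse([{'foo': 1, 'x-y': 2}, 'bar', 'z'])  # -> {'foo': 'bar', 'x-y': 'z'}
    devalue.parse([[1, 2, 3], 'a', 'b', 'c'])          # -> ['a', 'b', 'c']

    # Custom tagged types can be decoded via revivers
    devalue.parse([['parse', 1], '{"a":0}'], revivers={'parse': json.loads})  # -> {'a': 0}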
test/test_download.py

@@ -14,6 +14,7 @@
 
 from test.helper import (
     assertGreaterEqual,
+    assertLessEqual,
     expect_info_dict,
     expect_warnings,
     get_params,
@@ -65,10 +66,6 @@ def _file_md5(fn):
 
 @is_download_test
 class TestDownload(unittest.TestCase):
-    # Parallel testing in nosetests. See
-    # http://nose.readthedocs.org/en/latest/doc_tests/test_multiprocess/multiprocess.html
-    _multiprocess_shared_ = True
-
     maxDiff = None
 
     COMPLETED_TESTS = {}
@@ -121,10 +118,13 @@ def print_skipping(reason):
         params = get_params(test_case.get('params', {}))
         params['outtmpl'] = tname + '_' + params['outtmpl']
         if is_playlist and 'playlist' not in test_case:
-            params.setdefault('extract_flat', 'in_playlist')
-            params.setdefault('playlistend', test_case.get(
-                'playlist_mincount', test_case.get('playlist_count', -2) + 1))
+            params.setdefault('playlistend', max(
+                test_case.get('playlist_mincount', -1),
+                test_case.get('playlist_count', -2) + 1,
+                test_case.get('playlist_maxcount', -2) + 1))
             params.setdefault('skip_download', True)
+            if 'playlist_duration_sum' not in test_case:
+                params.setdefault('extract_flat', 'in_playlist')
 
         ydl = YoutubeDL(params, auto_init=False)
         ydl.add_default_info_extractors()
@@ -159,6 +159,7 @@ def try_rm_tcs_files(tcs=None):
             try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
         try_rm_tcs_files()
         try:
+            test_url = test_case['url']
             try_num = 1
             while True:
                 try:
@@ -166,7 +167,7 @@ def try_rm_tcs_files(tcs=None):
                     # for outside error handling, and returns the exit code
                     # instead of the result dict.
                     res_dict = ydl.extract_info(
-                        test_case['url'],
+                        test_url,
                         force_generic_extractor=params.get('force_generic_extractor', False))
                 except (DownloadError, ExtractorError) as err:
                     # Check if the exception is not a network related one
@@ -194,23 +195,23 @@ def try_rm_tcs_files(tcs=None):
             self.assertTrue('entries' in res_dict)
             expect_info_dict(self, res_dict, test_case.get('info_dict', {}))
 
+            num_entries = len(res_dict.get('entries', []))
             if 'playlist_mincount' in test_case:
+                mincount = test_case['playlist_mincount']
                 assertGreaterEqual(
-                    self,
-                    len(res_dict['entries']),
-                    test_case['playlist_mincount'],
-                    'Expected at least %d in playlist %s, but got only %d' % (
-                        test_case['playlist_mincount'], test_case['url'],
-                        len(res_dict['entries'])))
+                    self, num_entries, mincount,
+                    f'Expected at least {mincount} entries in playlist {test_url}, but got only {num_entries}')
             if 'playlist_count' in test_case:
+                count = test_case['playlist_count']
+                got = num_entries if num_entries <= count else 'more'
                 self.assertEqual(
-                    len(res_dict['entries']),
-                    test_case['playlist_count'],
-                    'Expected %d entries in playlist %s, but got %d.' % (
-                        test_case['playlist_count'],
-                        test_case['url'],
-                        len(res_dict['entries']),
-                    ))
+                    num_entries, count,
+                    f'Expected exactly {count} entries in playlist {test_url}, but got {got}')
+            if 'playlist_maxcount' in test_case:
+                maxcount = test_case['playlist_maxcount']
+                assertLessEqual(
+                    self, num_entries, maxcount,
+                    f'Expected at most {maxcount} entries in playlist {test_url}, but got more')
             if 'playlist_duration_sum' in test_case:
                 got_duration = sum(e['duration'] for e in res_dict['entries'])
                 self.assertEqual(
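Note: with the change above, a playlist test case may now bound the entry count from both sides, and `playlistend` is derived from whichever bound requires the most entries to be fetched:

    test_case = {  # illustrative test-case fragment
        'playlist_mincount': 25,
        'playlist_maxcount': 80,
    }
    playlistend = max(
        test_case.get('playlist_mincount', -1),
        test_case.get('playlist_count', -2) + 1,
        test_case.get('playlist_maxcount', -2) + 1)  # -> 81, one past the upper bound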
test/test_downloader_http.py

@@ -15,7 +15,6 @@
 from test.helper import http_server_port, try_rm
 from yt_dlp import YoutubeDL
 from yt_dlp.downloader.http import HttpFD
-from yt_dlp.utils import encodeFilename
 from yt_dlp.utils._utils import _YDLLogger as FakeLogger
 
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -82,12 +81,12 @@ def download(self, params, ep):
         ydl = YoutubeDL(params)
         downloader = HttpFD(ydl, params)
         filename = 'testfile.mp4'
-        try_rm(encodeFilename(filename))
+        try_rm(filename)
         self.assertTrue(downloader.real_download(filename, {
             'url': f'http://127.0.0.1:{self.port}/{ep}',
         }), ep)
-        self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
-        try_rm(encodeFilename(filename))
+        self.assertEqual(os.path.getsize(filename), TEST_SIZE, ep)
+        try_rm(filename)
 
     def download_all(self, params):
         for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
test/test_http_proxy.py

@@ -247,6 +247,7 @@ def ctx(request):
 
 @pytest.mark.parametrize(
     'handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
 @pytest.mark.parametrize('ctx', ['http'], indirect=True)  # pure http proxy can only support http
 class TestHTTPProxy:
     def test_http_no_auth(self, handler, ctx):
@@ -315,6 +316,7 @@ def test_http_with_idn(self, handler, ctx):
     ('Requests', 'https'),
     ('CurlCFFI', 'https'),
 ], indirect=True)
+@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestHTTPConnectProxy:
     def test_http_connect_no_auth(self, handler, ctx):
         with ctx.http_server(HTTPConnectProxyHandler) as server_address:
@@ -331,10 +333,6 @@ def test_http_connect_auth(self, handler, ctx):
             assert proxy_info['proxy'] == server_address
             assert 'Proxy-Authorization' in proxy_info['headers']
 
-    @pytest.mark.skip_handler(
-        'Requests',
-        'bug in urllib3 causes unclosed socket: https://github.com/urllib3/urllib3/issues/3374',
-    )
     def test_http_connect_bad_auth(self, handler, ctx):
         with ctx.http_server(HTTPConnectProxyHandler, username='test', password='test') as server_address:
             with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://test:bad@{server_address}'}) as rh:
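Note: the diff adds `@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')` markers, but the marker's implementation is not part of this excerpt. A hypothetical conftest-style sketch of how such a marker could be wired up (assumed, not the project's actual code):

    import pytest

    def pytest_collection_modifyitems(config, items):
        for item in items:
            marker = item.get_closest_marker('handler_flaky')
            if marker is None:
                continue
            # 'handler' is the indirectly parametrized fixture naming the request handler
            callspec = getattr(item, 'callspec', None)
            if callspec and callspec.params.get('handler') == marker.args[0]:
                # skip (or reroute to a rerun plugin) tests on the flaky handler
                item.add_marker(pytest.mark.skip(reason=marker.kwargs.get('reason', 'flaky')))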
test/test_jsc/conftest.py (new file, 60 lines)

@@ -0,0 +1,60 @@
+import re
+import pathlib
+
+import pytest
+
+import yt_dlp.globals
+from yt_dlp import YoutubeDL
+from yt_dlp.extractor.common import InfoExtractor
+
+
+_TESTDATA_PATH = pathlib.Path(__file__).parent.parent / 'testdata/sigs'
+_player_re = re.compile(r'^.+/player/(?P<id>[a-zA-Z0-9_/.-]+)\.js$')
+_player_id_trans = str.maketrans(dict.fromkeys('/.-', '_'))
+
+
+@pytest.fixture
+def ie() -> InfoExtractor:
+    runtime_names = yt_dlp.globals.supported_js_runtimes.value
+    ydl = YoutubeDL({'js_runtimes': {key: {} for key in runtime_names}})
+    ie = ydl.get_info_extractor('Youtube')
+
+    def _load_player(video_id, player_url, fatal=True):
+        match = _player_re.match(player_url)
+        test_id = match.group('id').translate(_player_id_trans)
+        cached_file = _TESTDATA_PATH / f'player-{test_id}.js'
+
+        if cached_file.exists():
+            return cached_file.read_text()
+
+        if code := ie._download_webpage(player_url, video_id, fatal=fatal):
+            _TESTDATA_PATH.mkdir(exist_ok=True, parents=True)
+            cached_file.write_text(code)
+            return code
+
+        return None
+
+    ie._load_player = _load_player
+    return ie
+
+
+class MockLogger:
+    def trace(self, message: str):
+        print(f'trace: {message}')
+
+    def debug(self, message: str, *, once=False):
+        print(f'debug: {message}')
+
+    def info(self, message: str):
+        print(f'info: {message}')
+
+    def warning(self, message: str, *, once=False):
+        print(f'warning: {message}')
+
+    def error(self, message: str):
+        print(f'error: {message}')
+
+
+@pytest.fixture
+def logger():
+    return MockLogger()
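Note: the `ie` fixture patches `_load_player` so each player JS file is downloaded at most once and then served from test/testdata/sigs on later runs. A minimal consumer might look like this (test body illustrative):

    def test_player_is_cached(ie):
        url = 'https://www.youtube.com/s/player/3d3ba064/player_ias_tce.vflset/en_US/base.js'
        code = ie._load_player(None, url)  # first call downloads, later calls read the cache
        assert code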
test/test_jsc/test_ejs_integration.py (new file, 128 lines)
@@ -0,0 +1,128 @@
from __future__ import annotations

import dataclasses
import enum
import importlib.util
import json

import pytest

from yt_dlp.extractor.youtube.jsc.provider import (
    JsChallengeRequest,
    JsChallengeType,
    JsChallengeProviderResponse,
    JsChallengeResponse,
    NChallengeInput,
    NChallengeOutput,
    SigChallengeInput,
    SigChallengeOutput,
)
from yt_dlp.extractor.youtube.jsc._builtin.bun import BunJCP
from yt_dlp.extractor.youtube.jsc._builtin.deno import DenoJCP
from yt_dlp.extractor.youtube.jsc._builtin.node import NodeJCP
from yt_dlp.extractor.youtube.jsc._builtin.quickjs import QuickJSJCP


_has_ejs = bool(importlib.util.find_spec('yt_dlp_ejs'))
pytestmark = pytest.mark.skipif(not _has_ejs, reason='yt-dlp-ejs not available')


class Variant(enum.Enum):
    main = 'player_ias.vflset/en_US/base.js'
    tcc = 'player_ias_tcc.vflset/en_US/base.js'
    tce = 'player_ias_tce.vflset/en_US/base.js'
    es5 = 'player_es5.vflset/en_US/base.js'
    es6 = 'player_es6.vflset/en_US/base.js'
    tv = 'tv-player-ias.vflset/tv-player-ias.js'
    tv_es6 = 'tv-player-es6.vflset/tv-player-es6.js'
    phone = 'player-plasma-ias-phone-en_US.vflset/base.js'
    tablet = 'player-plasma-ias-tablet-en_US.vflset/base.js'


@dataclasses.dataclass
class Challenge:
    player: str
    variant: Variant
    type: JsChallengeType
    values: dict[str, str] = dataclasses.field(default_factory=dict)

    def url(self, /):
        return f'https://www.youtube.com/s/player/{self.player}/{self.variant.value}'


CHALLENGES: list[Challenge] = [
    Challenge('3d3ba064', Variant.tce, JsChallengeType.N, {
        'ZdZIqFPQK-Ty8wId': 'qmtUsIz04xxiNW',
        '4GMrWHyKI5cEvhDO': 'N9gmEX7YhKTSmw',
    }),
    Challenge('3d3ba064', Variant.tce, JsChallengeType.SIG, {
        'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
            'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3gqEctUw-NYdNmOEvaepit0zJAtIEsgOV2SXZjhSHMNy0NXNG_1kNyBf6HPuAuCduh-a7O',
    }),
    Challenge('5ec65609', Variant.tce, JsChallengeType.N, {
        '0eRGgQWJGfT5rFHFj': '4SvMpDQH-vBJCw',
    }),
    Challenge('5ec65609', Variant.tce, JsChallengeType.SIG, {
        'AAJAJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grH0rTMICA1mmDc0HoXgW3CAiAQQ4=CspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ=I':
            'AJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grI0rTMICA1mmDc0HoXgW3CAiAQQ4HCspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ==',
    }),
    Challenge('6742b2b9', Variant.tce, JsChallengeType.N, {
        '_HPB-7GFg1VTkn9u': 'qUAsPryAO_ByYg',
        'K1t_fcB6phzuq2SF': 'Y7PcOt3VE62mog',
    }),
    Challenge('6742b2b9', Variant.tce, JsChallengeType.SIG, {
        'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJAA':
            'AJfQdSswRAIgMVVvrovTbw6UNh99kPa4D_XQjGT4qYu7S6SHM8EjoCACIEQnz-nKN5RgG6iUTnNJC58csYPSrnS_SzricuUMJZGM',
    }),
    Challenge('2b83d2e0', Variant.main, JsChallengeType.N, {
        '0eRGgQWJGfT5rFHFj': 'euHbygrCMLksxd',
    }),
    Challenge('2b83d2e0', Variant.main, JsChallengeType.SIG, {
        'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJA':
            '-MGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKnMznQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJ',
    }),
    Challenge('638ec5c6', Variant.main, JsChallengeType.N, {
        'ZdZIqFPQK-Ty8wId': '1qov8-KM-yH',
    }),
    Challenge('638ec5c6', Variant.main, JsChallengeType.SIG, {
        'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
            'MhudCuAuP-6fByOk1_GNXN7gNHHShjyXS2VOgsEItAJz0tipeav0OmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
    }),
]

requests: list[JsChallengeRequest] = []
responses: list[JsChallengeProviderResponse] = []
for test in CHALLENGES:
    input_type, output_type = {
        JsChallengeType.N: (NChallengeInput, NChallengeOutput),
        JsChallengeType.SIG: (SigChallengeInput, SigChallengeOutput),
    }[test.type]

    request = JsChallengeRequest(test.type, input_type(test.url(), list(test.values.keys())), test.player)
    requests.append(request)
    responses.append(JsChallengeProviderResponse(request, JsChallengeResponse(test.type, output_type(test.values))))


@pytest.fixture(params=[BunJCP, DenoJCP, NodeJCP, QuickJSJCP])
def jcp(request, ie, logger):
    obj = request.param(ie, logger, None)
    if not obj.is_available():
        pytest.skip(f'{obj.PROVIDER_NAME} is not available')
    obj.is_dev = True
    return obj


@pytest.mark.download
def test_bulk_requests(jcp):
    assert list(jcp.bulk_solve(requests)) == responses


@pytest.mark.download
def test_using_cached_player(jcp):
    first_player_requests = requests[:3]
    player = jcp._get_player(first_player_requests[0].video_id, first_player_requests[0].input.player_url)
    initial = json.loads(jcp._run_js_runtime(jcp._construct_stdin(player, False, first_player_requests)))
    preprocessed = initial.pop('preprocessed_player')
    result = json.loads(jcp._run_js_runtime(jcp._construct_stdin(preprocessed, True, first_player_requests)))

    assert initial == result
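For orientation, a minimal sketch of what one of these parametrized cases boils down to, solving a single n-challenge with one built-in runtime. It assumes Deno and yt-dlp-ejs are installed, reuses the conftest fixtures, and the helper name is illustrative:

from yt_dlp.extractor.youtube.jsc._builtin.deno import DenoJCP
from yt_dlp.extractor.youtube.jsc.provider import (
    JsChallengeRequest,
    JsChallengeType,
    NChallengeInput,
)


def solve_one_n_challenge(ie, logger):
    # Same construction the jcp fixture performs for each runtime class
    provider = DenoJCP(ie, logger, None)
    request = JsChallengeRequest(
        JsChallengeType.N,
        NChallengeInput(
            'https://www.youtube.com/s/player/3d3ba064/player_ias_tce.vflset/en_US/base.js',
            ['ZdZIqFPQK-Ty8wId'],
        ),
        '3d3ba064',  # becomes request.video_id, as in CHALLENGES above
    )
    return list(provider.bulk_solve([request]))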
test/test_jsc/test_provider.py (new file, 194 lines)
@@ -0,0 +1,194 @@
import pytest

from yt_dlp.extractor.youtube.jsc.provider import (
    JsChallengeProvider,
    JsChallengeRequest,
    JsChallengeProviderResponse,
    JsChallengeProviderRejectedRequest,
    JsChallengeType,
    JsChallengeResponse,
    NChallengeOutput,
    NChallengeInput,
    JsChallengeProviderError,
    register_provider,
    register_preference,
)
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
from yt_dlp.utils import ExtractorError
from yt_dlp.extractor.youtube.jsc._registry import _jsc_preferences, _jsc_providers


class ExampleJCP(JsChallengeProvider):
    PROVIDER_NAME = 'example-provider'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    _SUPPORTED_TYPES = [JsChallengeType.N]

    def is_available(self) -> bool:
        return True

    def _real_bulk_solve(self, requests):
        for request in requests:
            results = dict.fromkeys(request.input.challenges, 'example-solution')
            response = JsChallengeResponse(
                type=request.type,
                output=NChallengeOutput(results=results))
            yield JsChallengeProviderResponse(request=request, response=response)


PLAYER_URL = 'https://example.com/player.js'


class TestJsChallengeProvider:
    # note: some tests are covered in TestPoTokenProvider, which shares the same base class
    def test_base_type(self):
        assert issubclass(JsChallengeProvider, IEContentProvider)

    def test_create_provider_missing_bulk_solve_method(self, ie, logger):
        class MissingMethodsJCP(JsChallengeProvider):
            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError, match='bulk_solve'):
            MissingMethodsJCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_available_method(self, ie, logger):
        class MissingMethodsJCP(JsChallengeProvider):
            def _real_bulk_solve(self, requests):
                raise JsChallengeProviderRejectedRequest('Not implemented')

        with pytest.raises(TypeError, match='is_available'):
            MissingMethodsJCP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        class BarebonesProviderJCP(JsChallengeProvider):
            def is_available(self) -> bool:
                return True

            def _real_bulk_solve(self, requests):
                raise JsChallengeProviderRejectedRequest('Not implemented')

        provider = BarebonesProviderJCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_example_provider_success(self, ie, logger):
        provider = ExampleJCP(ie=ie, logger=logger, settings={})

        request = JsChallengeRequest(
            type=JsChallengeType.N,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))

        request_two = JsChallengeRequest(
            type=JsChallengeType.N,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge-2']))

        responses = list(provider.bulk_solve([request, request_two]))
        assert len(responses) == 2
        assert all(isinstance(r, JsChallengeProviderResponse) for r in responses)
        assert responses == [
            JsChallengeProviderResponse(
                request=request,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
                ),
            ),
            JsChallengeProviderResponse(
                request=request_two,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge-2': 'example-solution'}),
                ),
            ),
        ]

    def test_provider_unsupported_challenge_type(self, ie, logger):
        provider = ExampleJCP(ie=ie, logger=logger, settings={})
        request_supported = JsChallengeRequest(
            type=JsChallengeType.N,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
        request_unsupported = JsChallengeRequest(
            type=JsChallengeType.SIG,
            input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
        responses = list(provider.bulk_solve([request_supported, request_unsupported, request_supported]))
        assert len(responses) == 3
        # Requests are validated first before continuing to _real_bulk_solve
        assert isinstance(responses[0], JsChallengeProviderResponse)
        assert isinstance(responses[0].error, JsChallengeProviderRejectedRequest)
        assert responses[0].request is request_unsupported
        assert str(responses[0].error) == 'JS Challenge type "JsChallengeType.SIG" is not supported by example-provider'

        assert responses[1:] == [
            JsChallengeProviderResponse(
                request=request_supported,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
                ),
            ),
            JsChallengeProviderResponse(
                request=request_supported,
                response=JsChallengeResponse(
                    type=JsChallengeType.N,
                    output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
                ),
            ),
        ]

    def test_provider_get_player(self, ie, logger):
        ie._load_player = lambda video_id, player_url, fatal: (video_id, player_url, fatal)
        provider = ExampleJCP(ie=ie, logger=logger, settings={})
        assert provider._get_player('video123', PLAYER_URL) == ('video123', PLAYER_URL, True)

    def test_provider_get_player_error(self, ie, logger):
        def raise_error(video_id, player_url, fatal):
            raise ExtractorError('Failed to load player')

        ie._load_player = raise_error
        provider = ExampleJCP(ie=ie, logger=logger, settings={})
        with pytest.raises(JsChallengeProviderError, match='Failed to load player for JS challenge'):
            provider._get_player('video123', PLAYER_URL)

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(JsChallengeProvider):
            PROVIDER_NAME = 'invalid-suffix'

            def _real_bulk_solve(self, requests):
                raise JsChallengeProviderRejectedRequest('Not implemented')

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


def test_register_provider(ie):

    @register_provider
    class UnavailableProviderJCP(JsChallengeProvider):
        def is_available(self) -> bool:
            return False

        def _real_bulk_solve(self, requests):
            raise JsChallengeProviderRejectedRequest('Not implemented')

    assert _jsc_providers.value.get('UnavailableProvider') == UnavailableProviderJCP
    _jsc_providers.value.pop('UnavailableProvider')


def test_register_preference(ie):
    before = len(_jsc_preferences.value)

    @register_preference(ExampleJCP)
    def unavailable_preference(*args, **kwargs):
        return 1

    assert len(_jsc_preferences.value) == before + 1
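test_register_provider and test_register_preference exercise the same entry points a third-party provider would use. A minimal sketch, assuming only the API shown in this diff (class and preference names are illustrative; the class name must end in the JCP suffix, as test_require_class_end_with_suffix enforces):

from yt_dlp.extractor.youtube.jsc.provider import (
    JsChallengeProvider,
    JsChallengeProviderRejectedRequest,
    register_preference,
    register_provider,
)


@register_provider
class MyCustomJCP(JsChallengeProvider):
    PROVIDER_NAME = 'my-custom'
    PROVIDER_VERSION = '0.1.0'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def is_available(self) -> bool:
        return True

    def _real_bulk_solve(self, requests):
        # A real provider would evaluate the player JS here
        raise JsChallengeProviderRejectedRequest('sketch only')


@register_preference(MyCustomJCP)
def my_preference(*args, **kwargs):  # signature kept generic, as in the tests
    return 1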
test/test_jsinterp.py
@@ -9,7 +9,7 @@
 import math
 
-from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
+from yt_dlp.jsinterp import JS_Undefined, JSInterpreter, js_number_to_string
 
 
 class NaN:
@@ -93,6 +93,16 @@ def test_operators(self):
         self._test('function f(){return 0 ?? 42;}', 0)
         self._test('function f(){return "life, the universe and everything" < 42;}', False)
         self._test('function f(){return 0 - 7 * - 6;}', 42)
+        self._test('function f(){return true << "5";}', 32)
+        self._test('function f(){return true << true;}', 2)
+        self._test('function f(){return "19" & "21.9";}', 17)
+        self._test('function f(){return "19" & false;}', 0)
+        self._test('function f(){return "11.0" >> "2.1";}', 2)
+        self._test('function f(){return 5 ^ 9;}', 12)
+        self._test('function f(){return 0.0 << NaN}', 0)
+        self._test('function f(){return null << undefined}', 0)
+        # TODO: Does not work due to number too large
+        # self._test('function f(){return 21 << 4294967297}', 42)
 
     def test_array_access(self):
         self._test('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}', [5, 2, 7])
@@ -108,6 +118,7 @@ def test_assignments(self):
         self._test('function f(){var x = 20; x = 30 + 1; return x;}', 31)
         self._test('function f(){var x = 20; x += 30 + 1; return x;}', 51)
         self._test('function f(){var x = 20; x -= 30 + 1; return x;}', -11)
+        self._test('function f(){var x = 2; var y = ["a", "b"]; y[x%y["length"]]="z"; return y}', ['z', 'b'])
 
     @unittest.skip('Not implemented')
     def test_comments(self):
@@ -374,7 +385,7 @@ def test_negative(self):
     @unittest.skip('Not implemented')
     def test_packed(self):
         jsi = JSInterpreter('''function f(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}''')
         self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
-9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|')))
+9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|')))  # noqa: SIM905
 
     def test_join(self):
         test_input = list('test')
@@ -393,6 +404,8 @@ def test_split(self):
         test_result = list('test')
         tests = [
             'function f(a, b){return a.split(b)}',
+            'function f(a, b){return a["split"](b)}',
+            'function f(a, b){let x = ["split"]; return a[x[0]](b)}',
             'function f(a, b){return String.prototype.split.call(a, b)}',
             'function f(a, b){return String.prototype.split.apply(a, [b])}',
         ]
@@ -403,6 +416,131 @@ def test_split(self):
         self._test(jsi, [''], args=['', '-'])
         self._test(jsi, [], args=['', ''])
+
+    def test_slice(self):
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice()}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(5)}', [5, 6, 7, 8])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(99)}', [])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-2)}', [7, 8])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-99)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 0)}', [])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, 0)}', [])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 1)}', [0])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(3, 6)}', [3, 4, 5])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, -1)}', [1, 2, 3, 4, 5, 6, 7])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-1, 1)}', [])
+        self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-3, -1)}', [6, 7])
+        self._test('function f(){return "012345678".slice()}', '012345678')
+        self._test('function f(){return "012345678".slice(0)}', '012345678')
+        self._test('function f(){return "012345678".slice(5)}', '5678')
+        self._test('function f(){return "012345678".slice(99)}', '')
+        self._test('function f(){return "012345678".slice(-2)}', '78')
+        self._test('function f(){return "012345678".slice(-99)}', '012345678')
+        self._test('function f(){return "012345678".slice(0, 0)}', '')
+        self._test('function f(){return "012345678".slice(1, 0)}', '')
+        self._test('function f(){return "012345678".slice(0, 1)}', '0')
+        self._test('function f(){return "012345678".slice(3, 6)}', '345')
+        self._test('function f(){return "012345678".slice(1, -1)}', '1234567')
+        self._test('function f(){return "012345678".slice(-1, 1)}', '')
+        self._test('function f(){return "012345678".slice(-3, -1)}', '67')
+
+    def test_splice(self):
+        self._test('function f(){var T = ["0", "1", "2"]; T["splice"](2, 1, "0")[0]; return T }', ['0', '1', '0'])
+
+    def test_js_number_to_string(self):
+        for test, radix, expected in [
+            (0, None, '0'),
+            (-0, None, '0'),
+            (0.0, None, '0'),
+            (-0.0, None, '0'),
+            (math.nan, None, 'NaN'),
+            (-math.nan, None, 'NaN'),
+            (math.inf, None, 'Infinity'),
+            (-math.inf, None, '-Infinity'),
+            (10 ** 21.5, 8, '526665530627250154000000'),
+            (6, 2, '110'),
+            (254, 16, 'fe'),
+            (-10, 2, '-1010'),
+            (-0xff, 2, '-11111111'),
+            (0.1 + 0.2, 16, '0.4cccccccccccd'),
+            (1234.1234, 10, '1234.1234'),
+            # (1000000000000000128, 10, '1000000000000000100')
+        ]:
+            assert js_number_to_string(test, radix) == expected
+
+    def test_extract_function(self):
+        jsi = JSInterpreter('function a(b) { return b + 1; }')
+        func = jsi.extract_function('a')
+        self.assertEqual(func([2]), 3)
+
+    def test_extract_function_with_global_stack(self):
+        jsi = JSInterpreter('function c(d) { return d + e + f + g; }')
+        func = jsi.extract_function('c', {'e': 10}, {'f': 100, 'g': 1000})
+        self.assertEqual(func([1]), 1111)
+
+    def test_extract_object(self):
+        jsi = JSInterpreter('var a={};a.xy={};var xy;var zxy={};xy={z:function(){return "abc"}};')
+        self.assertTrue('z' in jsi.extract_object('xy', None))
+
+    def test_increment_decrement(self):
+        self._test('function f() { var x = 1; return ++x; }', 2)
+        self._test('function f() { var x = 1; return x++; }', 1)
+        self._test('function f() { var x = 1; x--; return x }', 0)
+        self._test('function f() { var y; var x = 1; x++, --x, x--, x--, y="z", "abc", x++; return --x }', -1)
+        self._test('function f() { var a = "test--"; return a; }', 'test--')
+        self._test('function f() { var b = 1; var a = "b--"; return a; }', 'b--')
+
+    def test_nested_function_scoping(self):
+        self._test(R'''
+            function f() {
+                var g = function() {
+                    var P = 2;
+                    return P;
+                };
+                var P = 1;
+                g();
+                return P;
+            }
+        ''', 1)
+        self._test(R'''
+            function f() {
+                var x = function() {
+                    for (var w = 1, M = []; w < 2; w++) switch (w) {
+                        case 1:
+                            M.push("a");
+                        case 2:
+                            M.push("b");
+                    }
+                    return M
+                };
+                var w = "c";
+                var M = "d";
+                var y = x();
+                y.push(w);
+                y.push(M);
+                return y;
+            }
+        ''', ['a', 'b', 'c', 'd'])
+        self._test(R'''
+            function f() {
+                var P, Q;
+                var z = 100;
+                var g = function() {
+                    var P, Q; P = 2; Q = 15;
+                    z = 0;
+                    return P+Q;
+                };
+                P = 1; Q = 10;
+                var x = g(), y = 3;
+                return P+Q+x+y+z;
+            }
+        ''', 31)
+
+    def test_undefined_varnames(self):
+        jsi = JSInterpreter('function f(){ var a; return [a, b]; }')
+        self._test(jsi, [JS_Undefined, JS_Undefined])
+        self.assertEqual(jsi._undefined_varnames, {'b'})
 
 
 if __name__ == '__main__':
     unittest.main()
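The new test_js_number_to_string table tracks ECMAScript's Number::toString formatting; a quick standalone check of a few rows, using the same import the first hunk adds:

from yt_dlp.jsinterp import js_number_to_string

# 0.1 + 0.2 is the familiar IEEE-754 double 0.30000000000000004...;
# rendered in base 16 the JavaScript way, it comes out as '0.4cccccccccccd'.
assert js_number_to_string(0.1 + 0.2, 16) == '0.4cccccccccccd'
assert js_number_to_string(254, 16) == 'fe'
assert js_number_to_string(6, 2) == '110'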
test/test_networking.py
@@ -3,6 +3,7 @@
 # Allow direct execution
 import os
 import sys
+from unittest.mock import MagicMock
 
 import pytest
@@ -22,7 +23,6 @@
 import tempfile
 import threading
 import time
-import urllib.error
 import urllib.request
 import warnings
 import zlib
@@ -39,6 +39,7 @@
 from yt_dlp.dependencies import brotli, curl_cffi, requests, urllib3
 from yt_dlp.networking import (
     HEADRequest,
+    PATCHRequest,
     PUTRequest,
     Request,
     RequestDirector,
@@ -222,10 +223,7 @@ def do_GET(self):
         if encoding == 'br' and brotli:
             payload = brotli.compress(payload)
         elif encoding == 'gzip':
-            buf = io.BytesIO()
-            with gzip.GzipFile(fileobj=buf, mode='wb') as f:
-                f.write(payload)
-            payload = buf.getvalue()
+            payload = gzip.compress(payload, mtime=0)
         elif encoding == 'deflate':
             payload = zlib.compress(payload)
         elif encoding == 'unsupported':
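The hunk above collapses a four-line GzipFile round-trip into a single gzip.compress(payload, mtime=0) call; pinning mtime to 0 keeps the gzip header free of a timestamp, so the compressed bytes are identical on every run. A standard-library-only illustration:

import gzip

payload = b'<html><video src="/vid.mp4" /></html>'
# With mtime pinned, two compressions of the same payload are byte-identical,
# which keeps any test comparing compressed output deterministic.
assert gzip.compress(payload, mtime=0) == gzip.compress(payload, mtime=0)
assert gzip.decompress(gzip.compress(payload, mtime=0)) == payload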
@@ -314,6 +312,7 @@ def setup_class(cls):
 
 
 @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
 class TestHTTPRequestHandler(TestRequestHandlerBase):
 
     def test_verify_cert(self, handler):
@@ -614,12 +613,14 @@ def test_source_address(self, handler):
             rh, Request(f'http://127.0.0.1:{self.http_port}/source_address')).read().decode()
         assert source_address == data
 
-    # Not supported by CurlCFFI
     @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
     def test_gzip_trailing_garbage(self, handler):
         with handler() as rh:
-            data = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode()
+            res = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage'))
+            data = res.read().decode()
             assert data == '<html><video src="/vid.mp4" /></html>'
+            # Should auto-close and mark the response adaptor as closed
+            assert res.closed
 
     @pytest.mark.skip_handler('CurlCFFI', 'not applicable to curl-cffi')
     @pytest.mark.skipif(not brotli, reason='brotli support is not installed')
@@ -631,6 +632,8 @@ def test_brotli(self, handler):
                 headers={'ytdl-encoding': 'br'}))
             assert res.headers.get('Content-Encoding') == 'br'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+            # Should auto-close and mark the response adaptor as closed
+            assert res.closed
 
     def test_deflate(self, handler):
         with handler() as rh:
@@ -640,6 +643,8 @@ def test_deflate(self, handler):
                 headers={'ytdl-encoding': 'deflate'}))
             assert res.headers.get('Content-Encoding') == 'deflate'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+            # Should auto-close and mark the response adaptor as closed
+            assert res.closed
 
     def test_gzip(self, handler):
         with handler() as rh:
@@ -649,6 +654,8 @@ def test_gzip(self, handler):
                 headers={'ytdl-encoding': 'gzip'}))
             assert res.headers.get('Content-Encoding') == 'gzip'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+            # Should auto-close and mark the response adaptor as closed
+            assert res.closed
 
     def test_multiple_encodings(self, handler):
         with handler() as rh:
@@ -659,6 +666,8 @@ def test_multiple_encodings(self, handler):
                 headers={'ytdl-encoding': pair}))
             assert res.headers.get('Content-Encoding') == pair
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'
+            # Should auto-close and mark the response adaptor as closed
+            assert res.closed
 
     @pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
     def test_unsupported_encoding(self, handler):
@@ -669,6 +678,8 @@ def test_unsupported_encoding(self, handler):
                 headers={'ytdl-encoding': 'unsupported', 'Accept-Encoding': '*'}))
             assert res.headers.get('Content-Encoding') == 'unsupported'
             assert res.read() == b'raw'
+            # Should auto-close and mark the response adaptor as closed
+            assert res.closed
 
     def test_read(self, handler):
         with handler() as rh:
@@ -676,9 +687,13 @@ def test_read(self, handler):
                 rh, Request(f'http://127.0.0.1:{self.http_port}/headers'))
             assert res.readable()
             assert res.read(1) == b'H'
+            # Ensure we don't close the adaptor yet
+            assert not res.closed
             assert res.read(3) == b'ost'
             assert res.read().decode().endswith('\n\n')
             assert res.read() == b''
+            # Should auto-close and mark the response adaptor as closed
+            assert res.closed
 
     def test_request_disable_proxy(self, handler):
         for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['http']:
@@ -720,8 +735,40 @@ def test_allproxy(self, handler):
                 rh, Request(
                     f'http://127.0.0.1:{self.http_port}/headers', proxies={'all': 'http://10.255.255.255'})).close()
 
+    @pytest.mark.skip_handlers_if(lambda _, handler: handler not in ['Urllib', 'CurlCFFI'], 'handler does not support keep_header_casing')
+    def test_keep_header_casing(self, handler):
+        with handler() as rh:
+            res = validate_and_send(
+                rh, Request(
+                    f'http://127.0.0.1:{self.http_port}/headers', headers={'X-test-heaDer': 'test'}, extensions={'keep_header_casing': True})).read().decode()
+
+            assert 'X-test-heaDer: test' in res
+
+    def test_partial_read_then_full_read(self, handler):
+        with handler() as rh:
+            for encoding in ('', 'gzip', 'deflate'):
+                res = validate_and_send(rh, Request(
+                    f'http://127.0.0.1:{self.http_port}/content-encoding',
+                    headers={'ytdl-encoding': encoding}))
+                assert res.headers.get('Content-Encoding') == encoding
+                assert res.read(6) == b'<html>'
+                assert res.read(0) == b''
+                assert res.read() == b'<video src="/vid.mp4" /></html>'
+
+    def test_partial_read_greater_than_response_then_full_read(self, handler):
+        with handler() as rh:
+            for encoding in ('', 'gzip', 'deflate'):
+                res = validate_and_send(rh, Request(
+                    f'http://127.0.0.1:{self.http_port}/content-encoding',
+                    headers={'ytdl-encoding': encoding}))
+                assert res.headers.get('Content-Encoding') == encoding
+                assert res.read(512) == b'<html><video src="/vid.mp4" /></html>'
+                assert res.read(0) == b''
+                assert res.read() == b''
+
 
 @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
 class TestClientCertificate:
     @classmethod
     def setup_class(cls):
@@ -822,6 +869,24 @@ def test_remove_logging_handler(self, handler, logger_name):
             rh.close()
         assert len(logging_handlers) == before_count
 
+    def test_wrap_request_errors(self):
+        class TestRequestHandler(RequestHandler):
+            def _validate(self, request):
+                if request.headers.get('x-fail'):
+                    raise UnsupportedRequest('test error')
+
+            def _send(self, request: Request):
+                raise RequestError('test error')
+
+        with TestRequestHandler(logger=FakeLogger()) as rh:
+            with pytest.raises(UnsupportedRequest, match='test error') as exc_info:
+                rh.validate(Request('http://example.com', headers={'x-fail': '1'}))
+            assert exc_info.value.handler is rh
+
+            with pytest.raises(RequestError, match='test error') as exc_info:
+                rh.send(Request('http://example.com'))
+            assert exc_info.value.handler is rh
+
 
 @pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
 class TestUrllibRequestHandler(TestRequestHandlerBase):
@@ -841,11 +906,53 @@ def test_file_urls(self, handler):
 
         with handler(enable_file_urls=True) as rh:
             res = validate_and_send(rh, req)
-            assert res.read() == b'foobar'
-            res.close()
+            assert res.read(1) == b'f'
+            assert not res.fp.closed
+            assert res.read() == b'oobar'
+            # Should automatically close the underlying file object
+            assert res.fp.closed
 
         os.unlink(tf.name)
 
+    def test_data_uri_auto_close(self, handler):
+        with handler() as rh:
+            res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
+            assert res.read() == b'hello world'
+            # Should automatically close the underlying file object
+            assert res.fp.closed
+            assert res.closed
+
+    def test_http_response_auto_close(self, handler):
+        with handler() as rh:
+            res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
+            assert res.read() == b'<html></html>'
+            # Should automatically close the underlying file object in the HTTP Response
+            assert isinstance(res.fp, http.client.HTTPResponse)
+            assert res.fp.fp is None
+            assert res.closed
+
+    def test_data_uri_partial_read_then_full_read(self, handler):
+        with handler() as rh:
+            res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
+            assert res.read(6) == b'hello '
+            assert res.read(0) == b''
+            assert res.read() == b'world'
+            # Should automatically close the underlying file object
+            assert res.fp.closed
+            assert res.closed
+
+    def test_data_uri_partial_read_greater_than_response_then_full_read(self, handler):
+        with handler() as rh:
+            res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
+            assert res.read(512) == b'hello world'
+            # Response and its underlying file object should already be closed now
+            assert res.fp.closed
+            assert res.closed
+            assert res.read(0) == b''
+            assert res.read() == b''
+            assert res.fp.closed
+            assert res.closed
+
     def test_http_error_returns_content(self, handler):
         # urllib HTTPError will try close the underlying response if reference to the HTTPError object is lost
         def get_response():
@@ -978,8 +1085,17 @@ def mock_close(*args, **kwargs):
         rh.close()
         assert called
 
+    def test_http_response_auto_close(self, handler):
+        with handler() as rh:
+            res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
+            assert res.read() == b'<html></html>'
+            # Should automatically close the underlying file object in the HTTP Response
+            assert res.fp.closed
+            assert res.closed
+
 
 @pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
+@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
 class TestCurlCFFIRequestHandler(TestRequestHandlerBase):
 
     @pytest.mark.parametrize('params,extensions', [
@@ -1143,6 +1259,14 @@ def close(self):
         assert res4.closed
         assert res4._buffer == b''
 
+    def test_http_response_auto_close(self, handler):
+        with handler() as rh:
+            res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
+            assert res.read() == b'<html></html>'
+            # Should automatically close the underlying file object in the HTTP Response
+            assert res.fp.closed
+            assert res.closed
+
 
 def run_validation(handler, error, req, **handler_kwargs):
     with handler(**handler_kwargs) as rh:
@@ -1271,6 +1395,7 @@ class HTTPSupportedRH(ValidationRH):
         ({'legacy_ssl': False}, False),
         ({'legacy_ssl': True}, False),
         ({'legacy_ssl': 'notabool'}, AssertionError),
+        ({'keep_header_casing': True}, UnsupportedRequest),
     ]),
     ('Requests', 'http', [
         ({'cookiejar': 'notacookiejar'}, AssertionError),
@@ -1281,6 +1406,9 @@ class HTTPSupportedRH(ValidationRH):
         ({'legacy_ssl': False}, False),
         ({'legacy_ssl': True}, False),
         ({'legacy_ssl': 'notabool'}, AssertionError),
+        ({'keep_header_casing': False}, False),
+        ({'keep_header_casing': True}, False),
+        ({'keep_header_casing': 'notabool'}, AssertionError),
     ]),
     ('CurlCFFI', 'http', [
         ({'cookiejar': 'notacookiejar'}, AssertionError),
@@ -1826,6 +1954,7 @@ def test_method(self):
 
     def test_request_helpers(self):
         assert HEADRequest('http://example.com').method == 'HEAD'
+        assert PATCHRequest('http://example.com').method == 'PATCH'
         assert PUTRequest('http://example.com').method == 'PUT'
 
     def test_headers(self):
@@ -1993,6 +2122,30 @@ def test_compat(self):
         assert res.info() is res.headers
         assert res.getheader('test') == res.get_header('test')
 
+    def test_auto_close(self):
+        # Should mark the response as closed if the underlying file is closed
+        class AutoCloseBytesIO(io.BytesIO):
+            def read(self, size=-1, /):
+                data = super().read(size)
+                self.close()
+                return data
+
+        fp = AutoCloseBytesIO(b'test')
+        res = Response(fp, url='test://', headers={}, status=200)
+        assert not res.closed
+        res.read()
+        assert res.closed
+
+    def test_close(self):
+        # Should not call close() on the underlying file when already closed
+        fp = MagicMock()
+        fp.closed = False
+        res = Response(fp, url='test://', headers={}, status=200)
+        res.close()
+        fp.closed = True
+        res.close()
+        assert fp.close.call_count == 1
+
 
 class TestImpersonateTarget:
     @pytest.mark.parametrize('target_str,expected', [
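test_keep_header_casing above drives a new `keep_header_casing` request extension; a sketch of how a caller would opt in (the URL and header are illustrative, and per the test's skip condition only the Urllib and CurlCFFI handlers honor it):

from yt_dlp.networking import Request

# Ask the handler to send header names with their original casing
# instead of normalizing them.
req = Request(
    'http://127.0.0.1:8080/headers',
    headers={'X-test-heaDer': 'test'},
    extensions={'keep_header_casing': True},
)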
test/test_networking_utils.py
@@ -20,7 +20,6 @@
     add_accept_encoding_header,
     get_redirect_method,
     make_socks_proxy_opts,
-    select_proxy,
     ssl_load_certs,
 )
 from yt_dlp.networking.exceptions import (
@@ -28,7 +27,7 @@
     IncompleteRead,
 )
 from yt_dlp.socks import ProxyType
-from yt_dlp.utils.networking import HTTPHeaderDict
+from yt_dlp.utils.networking import HTTPHeaderDict, select_proxy
 
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
test/test_overwrites.py
@@ -29,7 +29,7 @@ def test_default_overwrites(self):
             '-o', 'test.webm',
             'https://www.youtube.com/watch?v=jNQXAC9IVRw',
         ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        sout, _ = outp.communicate()
         self.assertTrue(b'has already been downloaded' in sout)
         # if the file has no content, it has not been redownloaded
         self.assertTrue(os.path.getsize(download_file) < 1)
@@ -41,7 +41,7 @@ def test_yes_overwrites(self):
             '-o', 'test.webm',
             'https://www.youtube.com/watch?v=jNQXAC9IVRw',
         ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        sout, _ = outp.communicate()
        self.assertTrue(b'has already been downloaded' not in sout)
         # if the file has no content, it has not been redownloaded
         self.assertTrue(os.path.getsize(download_file) > 1)
@@ -10,21 +10,71 @@
 sys.path.append(str(TEST_DATA_DIR))
 importlib.invalidate_caches()

-from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins
+from yt_dlp.plugins import (
+    PACKAGE_NAME,
+    PluginSpec,
+    directories,
+    load_plugins,
+    load_all_plugins,
+    register_plugin_spec,
+)
+
+from yt_dlp.globals import (
+    extractors,
+    postprocessors,
+    plugin_dirs,
+    plugin_ies,
+    plugin_pps,
+    all_plugins_loaded,
+    plugin_specs,
+)
+
+
+EXTRACTOR_PLUGIN_SPEC = PluginSpec(
+    module_name='extractor',
+    suffix='IE',
+    destination=extractors,
+    plugin_destination=plugin_ies,
+)
+
+POSTPROCESSOR_PLUGIN_SPEC = PluginSpec(
+    module_name='postprocessor',
+    suffix='PP',
+    destination=postprocessors,
+    plugin_destination=plugin_pps,
+)
+
+
+def reset_plugins():
+    plugin_ies.value = {}
+    plugin_pps.value = {}
+    plugin_dirs.value = ['default']
+    plugin_specs.value = {}
+    all_plugins_loaded.value = False
+    # Clearing override plugins is probably difficult
+    for module_name in tuple(sys.modules):
+        for plugin_type in ('extractor', 'postprocessor'):
+            if module_name.startswith(f'{PACKAGE_NAME}.{plugin_type}.'):
+                del sys.modules[module_name]
+
+    importlib.invalidate_caches()
+
+
 class TestPlugins(unittest.TestCase):

     TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME

+    def setUp(self):
+        reset_plugins()
+
+    def tearDown(self):
+        reset_plugins()
+
     def test_directories_containing_plugins(self):
         self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))

     def test_extractor_classes(self):
-        for module_name in tuple(sys.modules):
-            if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
-                del sys.modules[module_name]
-        plugins_ie = load_plugins('extractor', 'IE')
+        plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)

         self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
         self.assertIn('NormalPluginIE', plugins_ie.keys())
@@ -34,17 +84,29 @@ def test_extractor_classes(self):
             f'{PACKAGE_NAME}.extractor._ignore' in sys.modules,
             'loaded module beginning with underscore')
         self.assertNotIn('IgnorePluginIE', plugins_ie.keys())
+        self.assertNotIn('IgnorePluginIE', plugin_ies.value)

         # Don't load extractors with underscore prefix
         self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())
+        self.assertNotIn('_IgnoreUnderscorePluginIE', plugin_ies.value)

         # Don't load extractors not specified in __all__ (if supplied)
         self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
+        self.assertNotIn('IgnoreNotInAllPluginIE', plugin_ies.value)
         self.assertIn('InAllPluginIE', plugins_ie.keys())
+        self.assertIn('InAllPluginIE', plugin_ies.value)
+
+        # Don't load override extractors
+        self.assertNotIn('OverrideGenericIE', plugins_ie.keys())
+        self.assertNotIn('OverrideGenericIE', plugin_ies.value)
+        self.assertNotIn('_UnderscoreOverrideGenericIE', plugins_ie.keys())
+        self.assertNotIn('_UnderscoreOverrideGenericIE', plugin_ies.value)

     def test_postprocessor_classes(self):
-        plugins_pp = load_plugins('postprocessor', 'PP')
+        plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
         self.assertIn('NormalPluginPP', plugins_pp.keys())
+        self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+        self.assertIn('NormalPluginPP', plugin_pps.value)

     def test_importing_zipped_module(self):
         zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'

@@ -57,10 +119,10 @@ def test_importing_zipped_module(self):
                 package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
                 self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

-            plugins_ie = load_plugins('extractor', 'IE')
+            plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
             self.assertIn('ZippedPluginIE', plugins_ie.keys())

-            plugins_pp = load_plugins('postprocessor', 'PP')
+            plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
             self.assertIn('ZippedPluginPP', plugins_pp.keys())

         finally:

@@ -68,6 +130,117 @@ def test_importing_zipped_module(self):
             os.remove(zip_path)
             importlib.invalidate_caches()  # reset the import caches

+    def test_reloading_plugins(self):
+        reload_plugins_path = TEST_DATA_DIR / 'reload_plugins'
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+        load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
+
+        # Remove default folder and add reload_plugin path
+        sys.path.remove(str(TEST_DATA_DIR))
+        sys.path.append(str(reload_plugins_path))
+        importlib.invalidate_caches()
+        try:
+            for plugin_type in ('extractor', 'postprocessor'):
+                package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
+                self.assertIn(reload_plugins_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))
+
+            plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
+            self.assertIn('NormalPluginIE', plugins_ie.keys())
+            self.assertTrue(
+                plugins_ie['NormalPluginIE'].REPLACED,
+                msg='Reloading has not replaced original extractor plugin')
+            self.assertTrue(
+                extractors.value['NormalPluginIE'].REPLACED,
+                msg='Reloading has not replaced original extractor plugin globally')
+
+            plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
+            self.assertIn('NormalPluginPP', plugins_pp.keys())
+            self.assertTrue(plugins_pp['NormalPluginPP'].REPLACED,
+                            msg='Reloading has not replaced original postprocessor plugin')
+            self.assertTrue(
+                postprocessors.value['NormalPluginPP'].REPLACED,
+                msg='Reloading has not replaced original postprocessor plugin globally')
+
+        finally:
+            sys.path.remove(str(reload_plugins_path))
+            sys.path.append(str(TEST_DATA_DIR))
+            importlib.invalidate_caches()
+
+    def test_extractor_override_plugin(self):
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+        from yt_dlp.extractor.generic import GenericIE
+
+        self.assertEqual(GenericIE.TEST_FIELD, 'override')
+        self.assertEqual(GenericIE.SECONDARY_TEST_FIELD, 'underscore-override')
+
+        self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')
+        importlib.invalidate_caches()
+        # test that loading a second time doesn't wrap a second time
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+        from yt_dlp.extractor.generic import GenericIE
+        self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')
+
+    def test_load_all_plugin_types(self):
+
+        # no plugin specs registered
+        load_all_plugins()
+
+        self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
+        self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+
+        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
+        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
+        load_all_plugins()
+        self.assertTrue(all_plugins_loaded.value)
+
+        self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
+        self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+
+    def test_no_plugin_dirs(self):
+        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
+        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
+
+        plugin_dirs.value = []
+        load_all_plugins()
+
+        self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
+        self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+
+    def test_set_plugin_dirs(self):
+        custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')
+        plugin_dirs.value = [custom_plugin_dir]
+
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+        self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
+        self.assertIn('PackagePluginIE', plugin_ies.value)
+
+    def test_invalid_plugin_dir(self):
+        plugin_dirs.value = ['invalid_dir']
+        with self.assertRaises(ValueError):
+            load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+    def test_append_plugin_dirs(self):
+        custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')
+
+        self.assertEqual(plugin_dirs.value, ['default'])
+        plugin_dirs.value.append(custom_plugin_dir)
+        self.assertEqual(plugin_dirs.value, ['default', custom_plugin_dir])
+
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+        self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
+        self.assertIn('PackagePluginIE', plugin_ies.value)
+
+    def test_get_plugin_spec(self):
+        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
+        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
+
+        self.assertEqual(plugin_specs.value.get('extractor'), EXTRACTOR_PLUGIN_SPEC)
+        self.assertEqual(plugin_specs.value.get('postprocessor'), POSTPROCESSOR_PLUGIN_SPEC)
+        self.assertIsNone(plugin_specs.value.get('invalid'))
+

 if __name__ == '__main__':
     unittest.main()
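Note (illustration, not part of the diff): the tests above exercise the spec-based loader against plugins shipped under the yt_dlp_plugins namespace package. A minimal extractor plugin that EXTRACTOR_PLUGIN_SPEC would pick up could look like the following; the module path and class are hypothetical:

# <plugin dir>/yt_dlp_plugins/extractor/sample.py  (hypothetical path and class)
from yt_dlp.extractor.common import InfoExtractor


class SamplePluginIE(InfoExtractor):  # the 'IE' suffix matches EXTRACTOR_PLUGIN_SPEC's suffix
    _VALID_URL = r'https?://sample\.example/watch/(?P<id>\w+)'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # Return a minimal info dict; a real plugin would extract these fields
        return {
            'id': video_id,
            'title': video_id,
            'url': f'https://sample.example/media/{video_id}.mp4',
        }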
@@ -8,6 +8,8 @@
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

+import subprocess
+
 from yt_dlp import YoutubeDL
 from yt_dlp.utils import shell_quote
 from yt_dlp.postprocessor import (

@@ -47,7 +49,18 @@ def test_escaping(self):
             print('Skipping: ffmpeg not found')
             return

-        file = 'test/testdata/thumbnails/foo %d bar/foo_%d.{}'
+        test_data_dir = 'test/testdata/thumbnails'
+        generated_file = f'{test_data_dir}/empty.webp'
+
+        subprocess.check_call([
+            pp.executable, '-y', '-f', 'lavfi', '-i', 'color=c=black:s=320x320',
+            '-c:v', 'libwebp', '-pix_fmt', 'yuv420p', '-vframes', '1', generated_file,
+        ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+
+        file = test_data_dir + '/foo %d bar/foo_%d.{}'
+        initial_file = file.format('webp')
+        os.replace(generated_file, initial_file)
+
         tests = (('webp', 'png'), ('png', 'jpg'))

         for inp, out in tests:

@@ -55,11 +68,13 @@ def test_escaping(self):
             if os.path.exists(out_file):
                 os.remove(out_file)
             pp.convert_thumbnail(file.format(inp), out)
-            assert os.path.exists(out_file)
+            self.assertTrue(os.path.exists(out_file))

         for _, out in tests:
             os.remove(file.format(out))

+        os.remove(initial_file)
+

 class TestExec(unittest.TestCase):
     def test_parse_cmd(self):

@@ -100,7 +115,7 @@ def _chapters(self, ends, titles):
         self.assertEqual(len(ends), len(titles))
         start = 0
         chapters = []
-        for e, t in zip(ends, titles):
+        for e, t in zip(ends, titles, strict=True):
             chapters.append(self._chapter(start, e, t))
             start = e
         return chapters

@@ -610,3 +625,7 @@ def test_quote_for_concat_QuotesAtEnd(self):
         self.assertEqual(
             r"'special '\'' characters '\'' galore'\'\'\'",
             self._pp._quote_for_ffmpeg("special ' characters ' galore'''"))
+
+
+if __name__ == '__main__':
+    unittest.main()
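Note on the _chapters hunk above: zip(..., strict=True) (Python 3.10+) raises instead of silently truncating when the iterables differ in length:

# ValueError: zip() argument 2 is shorter than argument 1
list(zip([1, 2], ['a'], strict=True))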
test/test_pot/conftest.py (new file, 71 lines)
@@ -0,0 +1,71 @@
import collections

import pytest

from yt_dlp import YoutubeDL
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.extractor.youtube.pot._provider import IEContentProviderLogger
from yt_dlp.extractor.youtube.pot.provider import PoTokenRequest, PoTokenContext
from yt_dlp.utils.networking import HTTPHeaderDict


class MockLogger(IEContentProviderLogger):

    log_level = IEContentProviderLogger.LogLevel.TRACE

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.messages = collections.defaultdict(list)

    def trace(self, message: str):
        self.messages['trace'].append(message)

    def debug(self, message: str):
        self.messages['debug'].append(message)

    def info(self, message: str):
        self.messages['info'].append(message)

    def warning(self, message: str, *, once=False):
        self.messages['warning'].append(message)

    def error(self, message: str):
        self.messages['error'].append(message)


@pytest.fixture
def ie() -> InfoExtractor:
    ydl = YoutubeDL()
    return ydl.get_info_extractor('Youtube')


@pytest.fixture
def logger() -> MockLogger:
    return MockLogger()


@pytest.fixture()
def pot_request() -> PoTokenRequest:
    return PoTokenRequest(
        context=PoTokenContext.GVS,
        innertube_context={'client': {'clientName': 'WEB'}},
        innertube_host='youtube.com',
        session_index=None,
        player_url=None,
        is_authenticated=False,
        video_webpage=None,

        visitor_data='example-visitor-data',
        data_sync_id='example-data-sync-id',
        video_id='example-video-id',

        request_cookiejar=YoutubeDLCookieJar(),
        request_proxy=None,
        request_headers=HTTPHeaderDict(),
        request_timeout=None,
        request_source_address=None,
        request_verify_tls=True,

        bypass_cache=False,
    )
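Note (illustration, not part of the diff): the ie, logger and pot_request fixtures above are requested by name in the sibling test_pot modules. A minimal, hypothetical consumer:

# Hypothetical test module placed next to conftest.py
def test_request_defaults(pot_request, logger):
    assert pot_request.video_id == 'example-video-id'
    assert logger.messages == {}  # MockLogger starts with no recorded messages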
test/test_pot/test_pot_builtin_memorycache.py (new file, 117 lines)
@@ -0,0 +1,117 @@
import threading
import time
from collections import OrderedDict
import pytest
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot._builtin.memory_cache import MemoryLRUPCP, memorylru_preference, initialize_global_cache
from yt_dlp.version import __version__
from yt_dlp.extractor.youtube.pot._registry import _pot_cache_providers, _pot_memory_cache


class TestMemoryLRUPCS:

    def test_base_type(self):
        assert issubclass(MemoryLRUPCP, IEContentProvider)
        assert issubclass(MemoryLRUPCP, BuiltinIEContentProvider)

    @pytest.fixture
    def pcp(self, ie, logger) -> MemoryLRUPCP:
        return MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), max_size))

    def test_is_registered(self):
        assert _pot_cache_providers.value.get('MemoryLRU') == MemoryLRUPCP

    def test_initialization(self, pcp):
        assert pcp.PROVIDER_NAME == 'memory'
        assert pcp.PROVIDER_VERSION == __version__
        assert pcp.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcp.is_available()

    def test_store_and_get(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1

    def test_store_ignore_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_store_override_existing_key(self, ie, logger):
        MAX_SIZE = 2
        pcp = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        pcp.store('key1', 'value1', int(time.time()) + 60)
        pcp.store('key2', 'value2', int(time.time()) + 60)
        assert len(pcp.cache) == 2
        pcp.store('key1', 'value2', int(time.time()) + 60)
        # Ensure that the override key gets added to the end of the cache instead of in the same position
        pcp.store('key3', 'value3', int(time.time()) + 60)
        assert pcp.get('key1') == 'value2'

    def test_store_ignore_expired_existing_key(self, pcp):
        pcp.store('key1', 'value2', int(time.time()) + 60)
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value2'
        assert len(pcp.cache) == 1

    def test_get_key_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1
        pcp.cache['key1'] = ('value1', int(time.time()) - 1)
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_lru_eviction(self, ie, logger):
        MAX_SIZE = 2
        provider = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        provider.store('key1', 'value1', int(time.time()) + 5)
        provider.store('key2', 'value2', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') == 'value1'

        provider.store('key3', 'value3', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key2') is None

        provider.store('key4', 'value4', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') is None
        assert provider.get('key3') == 'value3'
        assert provider.get('key4') == 'value4'

    def test_delete(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 5)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value1'
        pcp.delete('key1')
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None

    def test_use_global_cache_default(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is _pot_memory_cache.value['lock']

        pcp2 = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == pcp2.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is pcp2.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is pcp2.lock is _pot_memory_cache.value['lock']

    def test_fail_max_size_change_global(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        with pytest.raises(ValueError, match='Cannot change max_size of initialized global memory cache'):
            initialize_global_cache(50)

        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25

    def test_memory_lru_preference(self, pcp, ie, pot_request):
        assert memorylru_preference(pcp, pot_request) == 10000
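Note (illustration, not part of the diff): the tests above pin down LRU-with-expiry semantics. A minimal sketch of those semantics, assuming an OrderedDict-backed cache; this is not the actual MemoryLRUPCP implementation:

import time
from collections import OrderedDict


class SketchLRU:
    def __init__(self, max_size=25):
        self.cache = OrderedDict()  # key -> (value, expires_at)
        self.max_size = max_size

    def store(self, key, value, expires_at):
        if expires_at <= int(time.time()):
            return  # already-expired entries are ignored (existing value kept)
        self.cache.pop(key, None)           # re-inserting moves the key to the end
        self.cache[key] = (value, expires_at)
        if len(self.cache) > self.max_size:
            self.cache.popitem(last=False)  # evict the least recently used entry

    def get(self, key):
        entry = self.cache.get(key)
        if entry is None:
            return None
        value, expires_at = entry
        if expires_at <= int(time.time()):
            del self.cache[key]             # expired entries are purged on read
            return None
        self.cache.move_to_end(key)         # reads refresh recency
        return value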
test/test_pot/test_pot_builtin_utils.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import pytest
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenContext,

)

from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding, ContentBindingType


class TestGetWebPoContentBinding:

    @pytest.mark.parametrize('client_name, context, is_authenticated, expected', [
        *[(client, context, is_authenticated, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, expected in [
                (PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
                (PoTokenContext.PLAYER, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.SUBS, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.GVS, True, ('example-data-sync-id', ContentBindingType.DATASYNC_ID)),
            ]],
        ('WEB_REMIX', PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('WEB_REMIX', PoTokenContext.PLAYER, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('ANDROID', PoTokenContext.GVS, False, (None, None)),
        ('IOS', PoTokenContext.GVS, False, (None, None)),
    ])
    def test_get_webpo_content_binding(self, pot_request, client_name, context, is_authenticated, expected):
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert get_webpo_content_binding(pot_request) == expected

    def test_extract_visitor_id(self, pot_request):
        pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == ('123abcXYZ_-', ContentBindingType.VISITOR_ID)

    def test_invalid_visitor_id(self, pot_request):
        # visitor id not alphanumeric (i.e. protobuf extraction failed)
        pot_request.visitor_data = 'CggxMjM0NTY3OCiA4s-qBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_no_visitor_id(self, pot_request):
        pot_request.visitor_data = 'KIDiz6oG'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_invalid_base64(self, pot_request):
        pot_request.visitor_data = 'invalid-base64'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_gvs_video_id_binding_experiment(self, pot_request):
        pot_request.context = PoTokenContext.GVS
        pot_request._gvs_bind_to_video_id = True
        assert get_webpo_content_binding(pot_request) == ('example-video-id', ContentBindingType.VIDEO_ID)
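Note (illustration, not part of the diff): the visitor-id tests above rely on visitor_data being a URL-escaped, base64url-encoded protobuf whose first field is the visitor id string; that framing is an assumption here, but it reproduces the expected value for the test input:

import base64
import urllib.parse

# Same visitor_data string used in test_extract_visitor_id above
data = base64.urlsafe_b64decode(urllib.parse.unquote('CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'))
assert data[0] == 0x0a             # protobuf field 1, length-delimited
length = data[1]                   # length prefix of the visitor id string
print(data[2:2 + length].decode())  # -> 123abcXYZ_-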
test/test_pot/test_pot_builtin_webpospec.py (new file, 92 lines)
@@ -0,0 +1,92 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.extractor.youtube.pot.cache import CacheProviderWritePolicy
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenRequest,
    PoTokenContext,

)
from yt_dlp.version import __version__

from yt_dlp.extractor.youtube.pot._builtin.webpo_cachespec import WebPoPCSP
from yt_dlp.extractor.youtube.pot._registry import _pot_pcs_providers


@pytest.fixture()
def pot_request(pot_request) -> PoTokenRequest:
    pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'  # visitor_id=123abcXYZ_-
    return pot_request


class TestWebPoPCSP:
    def test_base_type(self):
        assert issubclass(WebPoPCSP, IEContentProvider)
        assert issubclass(WebPoPCSP, BuiltinIEContentProvider)

    def test_init(self, ie, logger):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.PROVIDER_NAME == 'webpo'
        assert pcs.PROVIDER_VERSION == __version__
        assert pcs.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcs.is_available()

    def test_is_registered(self):
        assert _pot_pcs_providers.value.get('WebPo') == WebPoPCSP

    @pytest.mark.parametrize('client_name, context, is_authenticated', [
        ('ANDROID', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.PLAYER, False),
    ])
    def test_not_supports(self, ie, logger, pot_request, client_name, context, is_authenticated):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert pcs.generate_cache_spec(pot_request) is None

    @pytest.mark.parametrize('client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected', [
        *[(client, context, is_authenticated, remote_host, source_address, request_proxy, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, remote_host, source_address, request_proxy, expected in [
                (PoTokenContext.GVS, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
                (PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'video_id'}),
                (PoTokenContext.GVS, True, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': 'example-data-sync-id', 'cbt': 'datasync_id'}),
            ]],
        ('WEB_REMIX', PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
        ('WEB', PoTokenContext.GVS, False, None, None, None, {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id', 'ip': None, 'sa': None, 'px': None}),
        ('TVHTML5', PoTokenContext.PLAYER, False, None, None, 'http://example.com', {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'video_id', 'ip': None, 'sa': None, 'px': 'http://example.com'}),

    ])
    def test_generate_key_bindings(self, ie, logger, pot_request, client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        pot_request.innertube_context['client']['remoteHost'] = remote_host
        pot_request.request_source_address = source_address
        pot_request.request_proxy = request_proxy
        pot_request.video_id = '123abcXYZ_-'  # same as visitor id to test type

        assert pcs.generate_cache_spec(pot_request).key_bindings == expected

    def test_no_bind_visitor_id(self, ie, logger, pot_request):
        # Should not bind to visitor id if setting is set to False
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={'bind_to_visitor_id': ['false']})
        pot_request.innertube_context['client']['clientName'] = 'WEB'
        pot_request.context = PoTokenContext.GVS
        pot_request.is_authenticated = False
        assert pcs.generate_cache_spec(pot_request).key_bindings == {'t': 'webpo', 'ip': None, 'sa': None, 'px': None, 'cb': 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D', 'cbt': 'visitor_data'}

    def test_default_ttl(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.generate_cache_spec(pot_request).default_ttl == 6 * 60 * 60  # should default to 6 hours

    def test_write_policy(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.context = PoTokenContext.GVS
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL
        pot_request.context = PoTokenContext.PLAYER
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST
test/test_pot/test_pot_director.py (new file, 1529 lines)
(diff suppressed because it is too large)
test/test_pot/test_pot_framework.py (new file, 639 lines)
@@ -0,0 +1,639 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, configuration_arg
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.utils.networking import HTTPHeaderDict
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenRequest,
    PoTokenContext,
    ExternalRequestFeature,

)

from yt_dlp.extractor.youtube.pot.cache import (
    PoTokenCacheProvider,
    PoTokenCacheSpec,
    PoTokenCacheSpecProvider,
    CacheProviderWritePolicy,
)

import yt_dlp.extractor.youtube.pot.cache as cache

from yt_dlp.networking import Request
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenResponse,
    PoTokenProvider,
    PoTokenProviderRejectedRequest,
    provider_bug_report_message,
    register_provider,
    register_preference,
)

from yt_dlp.extractor.youtube.pot._registry import _pot_providers, _ptp_preferences, _pot_pcs_providers, _pot_cache_providers, _pot_cache_provider_preferences


class ExamplePTP(PoTokenProvider):
    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    _SUPPORTED_CLIENTS = ('WEB',)
    _SUPPORTED_CONTEXTS = (PoTokenContext.GVS, )

    _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
        ExternalRequestFeature.PROXY_SCHEME_HTTP,
        ExternalRequestFeature.PROXY_SCHEME_SOCKS5H,
    )

    def is_available(self) -> bool:
        return True

    def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
        return PoTokenResponse('example-token', expires_at=123)


class ExampleCacheProviderPCP(PoTokenCacheProvider):

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def is_available(self) -> bool:
        return True

    def get(self, key: str):
        return 'example-cache'

    def store(self, key: str, value: str, expires_at: int):
        pass

    def delete(self, key: str):
        pass


class ExampleCacheSpecProviderPCSP(PoTokenCacheSpecProvider):

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def generate_cache_spec(self, request: PoTokenRequest):
        return PoTokenCacheSpec(
            key_bindings={'field': 'example-key'},
            default_ttl=60,
            write_policy=CacheProviderWritePolicy.WRITE_FIRST,
        )
class TestPoTokenProvider:
|
||||||
|
|
||||||
|
def test_base_type(self):
|
||||||
|
assert issubclass(PoTokenProvider, IEContentProvider)
|
||||||
|
|
||||||
|
def test_create_provider_missing_fetch_method(self, ie, logger):
|
||||||
|
class MissingMethodsPTP(PoTokenProvider):
|
||||||
|
def is_available(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
MissingMethodsPTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
def test_create_provider_missing_available_method(self, ie, logger):
|
||||||
|
class MissingMethodsPTP(PoTokenProvider):
|
||||||
|
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||||
|
raise PoTokenProviderRejectedRequest('Not implemented')
|
||||||
|
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
MissingMethodsPTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
def test_barebones_provider(self, ie, logger):
|
||||||
|
class BarebonesProviderPTP(PoTokenProvider):
|
||||||
|
def is_available(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||||
|
raise PoTokenProviderRejectedRequest('Not implemented')
|
||||||
|
|
||||||
|
provider = BarebonesProviderPTP(ie=ie, logger=logger, settings={})
|
||||||
|
assert provider.PROVIDER_NAME == 'BarebonesProvider'
|
||||||
|
assert provider.PROVIDER_KEY == 'BarebonesProvider'
|
||||||
|
assert provider.PROVIDER_VERSION == '0.0.0'
|
||||||
|
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
|
||||||
|
|
||||||
|
def test_example_provider_success(self, ie, logger, pot_request):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
assert provider.PROVIDER_NAME == 'example'
|
||||||
|
assert provider.PROVIDER_KEY == 'Example'
|
||||||
|
assert provider.PROVIDER_VERSION == '0.0.1'
|
||||||
|
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
|
||||||
|
assert provider.is_available()
|
||||||
|
|
||||||
|
response = provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
assert response.po_token == 'example-token'
|
||||||
|
assert response.expires_at == 123
|
||||||
|
|
||||||
|
def test_provider_unsupported_context(self, ie, logger, pot_request):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
pot_request.context = PoTokenContext.PLAYER
|
||||||
|
|
||||||
|
with pytest.raises(PoTokenProviderRejectedRequest):
|
||||||
|
provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_unsupported_client(self, ie, logger, pot_request):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
pot_request.innertube_context['client']['clientName'] = 'ANDROID'
|
||||||
|
|
||||||
|
with pytest.raises(PoTokenProviderRejectedRequest):
|
||||||
|
provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_unsupported_proxy_scheme(self, ie, logger, pot_request):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
pot_request.request_proxy = 'socks4://example.com'
|
||||||
|
|
||||||
|
with pytest.raises(
|
||||||
|
PoTokenProviderRejectedRequest,
|
||||||
|
match=r'External requests by "example" provider do not support proxy scheme "socks4"\. Supported proxy '
|
||||||
|
'schemes: http, socks5h',
|
||||||
|
):
|
||||||
|
provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
pot_request.request_proxy = 'http://example.com'
|
||||||
|
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_ignore_external_request_features(self, ie, logger, pot_request):
|
||||||
|
class InternalPTP(ExamplePTP):
|
||||||
|
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = None
|
||||||
|
|
||||||
|
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
pot_request.request_proxy = 'socks5://example.com'
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
pot_request.request_source_address = '0.0.0.0'
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_unsupported_external_request_source_address(self, ie, logger, pot_request):
|
||||||
|
class InternalPTP(ExamplePTP):
|
||||||
|
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()
|
||||||
|
|
||||||
|
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
pot_request.request_source_address = None
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
pot_request.request_source_address = '0.0.0.0'
|
||||||
|
with pytest.raises(
|
||||||
|
PoTokenProviderRejectedRequest,
|
||||||
|
match='External requests by "example" provider do not support setting source address',
|
||||||
|
):
|
||||||
|
provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_supported_external_request_source_address(self, ie, logger, pot_request):
|
||||||
|
class InternalPTP(ExamplePTP):
|
||||||
|
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
|
||||||
|
ExternalRequestFeature.SOURCE_ADDRESS,
|
||||||
|
)
|
||||||
|
|
||||||
|
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
pot_request.request_source_address = None
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
pot_request.request_source_address = '0.0.0.0'
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_unsupported_external_request_tls_verification(self, ie, logger, pot_request):
|
||||||
|
class InternalPTP(ExamplePTP):
|
||||||
|
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()
|
||||||
|
|
||||||
|
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
pot_request.request_verify_tls = True
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
pot_request.request_verify_tls = False
|
||||||
|
with pytest.raises(
|
||||||
|
PoTokenProviderRejectedRequest,
|
||||||
|
match='External requests by "example" provider do not support ignoring TLS certificate failures',
|
||||||
|
):
|
||||||
|
provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_supported_external_request_tls_verification(self, ie, logger, pot_request):
|
||||||
|
class InternalPTP(ExamplePTP):
|
||||||
|
_SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
|
||||||
|
ExternalRequestFeature.DISABLE_TLS_VERIFICATION,
|
||||||
|
)
|
||||||
|
|
||||||
|
provider = InternalPTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
pot_request.request_verify_tls = True
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
pot_request.request_verify_tls = False
|
||||||
|
assert provider.request_pot(pot_request)
|
||||||
|
|
||||||
|
def test_provider_request_webpage(self, ie, logger, pot_request):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
cookiejar = YoutubeDLCookieJar()
|
||||||
|
pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
|
||||||
|
pot_request.request_proxy = 'socks5://example-proxy.com'
|
||||||
|
pot_request.request_cookiejar = cookiejar
|
||||||
|
|
||||||
|
def mock_urlopen(request):
|
||||||
|
return request
|
||||||
|
|
||||||
|
ie._downloader.urlopen = mock_urlopen
|
||||||
|
|
||||||
|
sent_request = provider._request_webpage(Request(
|
||||||
|
'https://example.com',
|
||||||
|
), pot_request=pot_request)
|
||||||
|
|
||||||
|
assert sent_request.url == 'https://example.com'
|
||||||
|
assert sent_request.headers['User-Agent'] == 'example-user-agent'
|
||||||
|
assert sent_request.proxies == {'all': 'socks5://example-proxy.com'}
|
||||||
|
assert sent_request.extensions['cookiejar'] is cookiejar
|
||||||
|
assert 'Requesting webpage' in logger.messages['info']
|
||||||
|
|
||||||
|
def test_provider_request_webpage_override(self, ie, logger, pot_request):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
cookiejar_request = YoutubeDLCookieJar()
|
||||||
|
pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
|
||||||
|
pot_request.request_proxy = 'socks5://example-proxy.com'
|
||||||
|
pot_request.request_cookiejar = cookiejar_request
|
||||||
|
|
||||||
|
def mock_urlopen(request):
|
||||||
|
return request
|
||||||
|
|
||||||
|
ie._downloader.urlopen = mock_urlopen
|
||||||
|
|
||||||
|
sent_request = provider._request_webpage(Request(
|
||||||
|
'https://example.com',
|
||||||
|
headers={'User-Agent': 'override-user-agent-override'},
|
||||||
|
proxies={'http': 'http://example-proxy-override.com'},
|
||||||
|
extensions={'cookiejar': YoutubeDLCookieJar()},
|
||||||
|
), pot_request=pot_request, note='Custom requesting webpage')
|
||||||
|
|
||||||
|
assert sent_request.url == 'https://example.com'
|
||||||
|
assert sent_request.headers['User-Agent'] == 'override-user-agent-override'
|
||||||
|
assert sent_request.proxies == {'http': 'http://example-proxy-override.com'}
|
||||||
|
assert sent_request.extensions['cookiejar'] is not cookiejar_request
|
||||||
|
assert 'Custom requesting webpage' in logger.messages['info']
|
||||||
|
|
||||||
|
def test_provider_request_webpage_no_log(self, ie, logger, pot_request):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
def mock_urlopen(request):
|
||||||
|
return request
|
||||||
|
|
||||||
|
ie._downloader.urlopen = mock_urlopen
|
||||||
|
|
||||||
|
sent_request = provider._request_webpage(Request(
|
||||||
|
'https://example.com',
|
||||||
|
), note=False)
|
||||||
|
|
||||||
|
assert sent_request.url == 'https://example.com'
|
||||||
|
assert 'info' not in logger.messages
|
||||||
|
|
||||||
|
def test_provider_request_webpage_no_pot_request(self, ie, logger):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
def mock_urlopen(request):
|
||||||
|
return request
|
||||||
|
|
||||||
|
ie._downloader.urlopen = mock_urlopen
|
||||||
|
|
||||||
|
sent_request = provider._request_webpage(Request(
|
||||||
|
'https://example.com',
|
||||||
|
), pot_request=None)
|
||||||
|
|
||||||
|
assert sent_request.url == 'https://example.com'
|
||||||
|
|
||||||
|
def test_get_config_arg(self, ie, logger):
|
||||||
|
provider = ExamplePTP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
|
||||||
|
|
||||||
|
assert provider._configuration_arg('abc') == ['123d']
|
||||||
|
assert provider._configuration_arg('abc', default=['default']) == ['123d']
|
||||||
|
assert provider._configuration_arg('ABC', default=['default']) == ['default']
|
||||||
|
assert provider._configuration_arg('abc', casesense=True) == ['123D']
|
||||||
|
assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']
|
||||||
|
|
||||||
|
def test_require_class_end_with_suffix(self, ie, logger):
|
||||||
|
class InvalidSuffix(PoTokenProvider):
|
||||||
|
PROVIDER_NAME = 'invalid-suffix'
|
||||||
|
|
||||||
|
def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
|
||||||
|
raise PoTokenProviderRejectedRequest('Not implemented')
|
||||||
|
|
||||||
|
def is_available(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
provider = InvalidSuffix(ie=ie, logger=logger, settings={})
|
||||||
|
|
||||||
|
with pytest.raises(AssertionError):
|
||||||
|
provider.PROVIDER_KEY # noqa: B018
|
||||||
|
|
||||||
|
|
||||||
class TestPoTokenCacheProvider:

    def test_base_type(self):
        assert issubclass(PoTokenCacheProvider, IEContentProvider)

    def test_create_provider_missing_get_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_store_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_delete_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_is_available_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        class BarebonesProviderPCP(PoTokenCacheProvider):

            def is_available(self) -> bool:
                return True

            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        provider = BarebonesProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_create_provider_example(self, ie, logger):
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()

    def test_get_config_arg(self, ie, logger):
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenCacheProvider):
            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018
class TestPoTokenCacheSpecProvider:

    def test_base_type(self):
        assert issubclass(PoTokenCacheSpecProvider, IEContentProvider)

    def test_create_provider_missing_supports_method(self, ie, logger):
        class MissingMethodsPCS(PoTokenCacheSpecProvider):
            pass

        with pytest.raises(TypeError):
            MissingMethodsPCS(ie=ie, logger=logger, settings={})

    def test_create_provider_barebones(self, ie, pot_request, logger):
        class BarebonesProviderPCSP(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return PoTokenCacheSpec(
                    default_ttl=100,
                    key_bindings={},
                )

        provider = BarebonesProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
        assert provider.is_available()
        assert provider.generate_cache_spec(request=pot_request).default_ttl == 100
        assert provider.generate_cache_spec(request=pot_request).key_bindings == {}
        assert provider.generate_cache_spec(request=pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL

    def test_create_provider_example(self, ie, pot_request, logger):
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheSpecProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()
        assert provider.generate_cache_spec(pot_request)
        assert provider.generate_cache_spec(pot_request).key_bindings == {'field': 'example-key'}
        assert provider.generate_cache_spec(pot_request).default_ttl == 60
        assert provider.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST

    def test_get_config_arg(self, ie, logger):
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})

        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return None

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


class TestPoTokenRequest:
    def test_copy_request(self, pot_request):
        copied_request = pot_request.copy()

        assert copied_request is not pot_request
        assert copied_request.context == pot_request.context
        assert copied_request.innertube_context == pot_request.innertube_context
        assert copied_request.innertube_context is not pot_request.innertube_context
        copied_request.innertube_context['client']['clientName'] = 'ANDROID'
        assert pot_request.innertube_context['client']['clientName'] != 'ANDROID'
        assert copied_request.innertube_host == pot_request.innertube_host
        assert copied_request.session_index == pot_request.session_index
        assert copied_request.player_url == pot_request.player_url
        assert copied_request.is_authenticated == pot_request.is_authenticated
        assert copied_request.visitor_data == pot_request.visitor_data
        assert copied_request.data_sync_id == pot_request.data_sync_id
        assert copied_request.video_id == pot_request.video_id
        assert copied_request.request_cookiejar is pot_request.request_cookiejar
        assert copied_request.request_proxy == pot_request.request_proxy
        assert copied_request.request_headers == pot_request.request_headers
        assert copied_request.request_headers is not pot_request.request_headers
        assert copied_request.request_timeout == pot_request.request_timeout
        assert copied_request.request_source_address == pot_request.request_source_address
        assert copied_request.request_verify_tls == pot_request.request_verify_tls
        assert copied_request.bypass_cache == pot_request.bypass_cache
def test_provider_bug_report_message(ie, logger):
    provider = ExamplePTP(ie=ie, logger=logger, settings={})
    assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'

    message = provider_bug_report_message(provider)
    assert message == '; please report this issue to the provider developer at https://example.com/issues .'

    message_before = provider_bug_report_message(provider, before='custom message!')
    assert message_before == 'custom message! Please report this issue to the provider developer at https://example.com/issues .'


def test_register_provider(ie):

    @register_provider
    class UnavailableProviderPTP(PoTokenProvider):
        def is_available(self) -> bool:
            return False

        def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
            raise PoTokenProviderRejectedRequest('Not implemented')

    assert _pot_providers.value.get('UnavailableProvider') == UnavailableProviderPTP
    _pot_providers.value.pop('UnavailableProvider')


def test_register_pot_preference(ie):
    before = len(_ptp_preferences.value)

    @register_preference(ExamplePTP)
    def unavailable_preference(provider: PoTokenProvider, request: PoTokenRequest):
        return 1

    assert len(_ptp_preferences.value) == before + 1


def test_register_cache_provider(ie):

    @cache.register_provider
    class UnavailableCacheProviderPCP(PoTokenCacheProvider):
        def is_available(self) -> bool:
            return False

        def get(self, key: str):
            return 'example-cache'

        def store(self, key: str, value: str, expires_at: int):
            pass

        def delete(self, key: str):
            pass

    assert _pot_cache_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCP
    _pot_cache_providers.value.pop('UnavailableCacheProvider')


def test_register_cache_provider_spec(ie):

    @cache.register_spec
    class UnavailableCacheProviderPCSP(PoTokenCacheSpecProvider):
        def is_available(self) -> bool:
            return False

        def generate_cache_spec(self, request: PoTokenRequest):
            return None

    assert _pot_pcs_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCSP
    _pot_pcs_providers.value.pop('UnavailableCacheProvider')


def test_register_cache_provider_preference(ie):
    before = len(_pot_cache_provider_preferences.value)

    @cache.register_preference(ExampleCacheProviderPCP)
    def unavailable_preference(provider: PoTokenCacheProvider, request: PoTokenRequest):
        return 1

    assert len(_pot_cache_provider_preferences.value) == before + 1


def test_logger_log_level(logger):
    assert logger.LogLevel('INFO') == logger.LogLevel.INFO
    assert logger.LogLevel('debuG') == logger.LogLevel.DEBUG
    assert logger.LogLevel(10) == logger.LogLevel.DEBUG
    assert logger.LogLevel('UNKNOWN') == logger.LogLevel.INFO


def test_configuration_arg():
    config = {'abc': ['123D'], 'xyz': ['456a', '789B']}

    assert configuration_arg(config, 'abc') == ['123d']
    assert configuration_arg(config, 'abc', default=['default']) == ['123d']
    assert configuration_arg(config, 'ABC', default=['default']) == ['default']
    assert configuration_arg(config, 'abc', casesense=True) == ['123D']
    assert configuration_arg(config, 'xyz', casesense=False) == ['456a', '789b']
@@ -216,6 +216,8 @@ def handle(self):
         protocol = websockets.ServerProtocol()
         connection = websockets.sync.server.ServerConnection(socket=self.request, protocol=protocol, close_timeout=0)
         connection.handshake()
-        connection.send(json.dumps(self.socks_info))
+        for message in connection:
+            if message == 'socks_info':
+                connection.send(json.dumps(self.socks_info))
         connection.close()

@@ -293,6 +295,7 @@ def ctx(request):
     ('Websockets', 'ws'),
     ('CurlCFFI', 'http'),
 ], indirect=True)
+@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
 class TestSocks4Proxy:
     def test_socks4_no_auth(self, handler, ctx):
         with handler() as rh:

@@ -368,6 +371,7 @@ def test_timeout(self, handler, ctx):
     ('Websockets', 'ws'),
     ('CurlCFFI', 'http'),
 ], indirect=True)
+@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
 class TestSocks5Proxy:

     def test_socks5_no_auth(self, handler, ctx):

@@ -14,7 +14,6 @@
     NRKTVIE,
     PBSIE,
     CeskaTelevizeIE,
-    ComedyCentralIE,
     DailymotionIE,
     DemocracynowIE,
     LyndaIE,

@@ -23,7 +22,6 @@
     TedTalkIE,
     ThePlatformFeedIE,
     ThePlatformIE,
-    VikiIE,
     VimeoIE,
     WallaIE,
     YoutubeIE,

@@ -280,23 +278,6 @@ def test_allsubtitles(self):
         self.assertEqual(md5(subtitles['nl']), 'fc6435027572b63fb4ab143abd5ad3f4')
 
 
-@is_download_test
-@unittest.skip('IE broken')
-class TestMTVSubtitles(BaseTestSubtitles):
-    url = 'http://www.cc.com/video-clips/p63lk0/adam-devine-s-house-party-chasing-white-swans'
-    IE = ComedyCentralIE
-
-    def getInfoDict(self):
-        return super().getInfoDict()['entries'][0]
-
-    def test_allsubtitles(self):
-        self.DL.params['writesubtitles'] = True
-        self.DL.params['allsubtitles'] = True
-        subtitles = self.getSubtitles()
-        self.assertEqual(set(subtitles.keys()), {'en'})
-        self.assertEqual(md5(subtitles['en']), '78206b8d8a0cfa9da64dc026eea48961')
-
-
 @is_download_test
 class TestNRKSubtitles(BaseTestSubtitles):
     url = 'http://tv.nrk.no/serie/ikke-gjoer-dette-hjemme/DMPV73000411/sesong-2/episode-1'

@@ -331,20 +312,6 @@ def test_subtitles_array_key(self):
         self.assertEqual(md5(subtitles['it']), '4b3264186fbb103508abe5311cfcb9cd')
 
 
-@is_download_test
-@unittest.skip('IE broken - DRM only')
-class TestVikiSubtitles(BaseTestSubtitles):
-    url = 'http://www.viki.com/videos/1060846v-punch-episode-18'
-    IE = VikiIE
-
-    def test_allsubtitles(self):
-        self.DL.params['writesubtitles'] = True
-        self.DL.params['allsubtitles'] = True
-        subtitles = self.getSubtitles()
-        self.assertEqual(set(subtitles.keys()), {'en'})
-        self.assertEqual(md5(subtitles['en']), '53cb083a5914b2d84ef1ab67b880d18a')
-
-
 @is_download_test
 class TestThePlatformSubtitles(BaseTestSubtitles):
     # from http://www.3playmedia.com/services-features/tools/integrations/theplatform/

@@ -4,8 +4,23 @@
 
 import pytest
 
-from yt_dlp.utils import dict_get, int_or_none, str_or_none
-from yt_dlp.utils.traversal import traverse_obj
+from yt_dlp.utils import (
+    ExtractorError,
+    determine_ext,
+    dict_get,
+    int_or_none,
+    join_nonempty,
+    str_or_none,
+)
+from yt_dlp.utils.traversal import (
+    find_element,
+    find_elements,
+    require,
+    subs_list_to_dict,
+    traverse_obj,
+    trim_str,
+    unpack,
+)
 
 _TEST_DATA = {
     100: 100,

@@ -24,6 +39,14 @@
     'dict': {},
 }
 
+_TEST_HTML = '''<html><body>
+    <div class="a">1</div>
+    <div class="a" id="x" custom="z">2</div>
+    <div class="b" data-id="y" custom="z">3</div>
+    <p class="a">4</p>
+    <p id="d" custom="e">5</p>
+</body></html>'''
+
 
 class TestTraversal:
     def test_traversal_base(self):

@@ -393,18 +416,8 @@ def test_traversal_unbranching(self):
             '`any` should allow further branching'
 
     def test_traversal_morsel(self):
-        values = {
-            'expires': 'a',
-            'path': 'b',
-            'comment': 'c',
-            'domain': 'd',
-            'max-age': 'e',
-            'secure': 'f',
-            'httponly': 'g',
-            'version': 'h',
-            'samesite': 'i',
-        }
         morsel = http.cookies.Morsel()
+        values = dict(zip(morsel, 'abcdefghijklmnop', strict=False))
         morsel.set('item_key', 'item_value', 'coded_value')
         morsel.update(values)
         values['key'] = 'item_key'

@@ -420,6 +433,186 @@ def test_traversal_morsel(self):
         assert traverse_obj(morsel, [(None,), any]) == morsel, \
             'Morsel should not be implicitly changed to dict on usage'
 
+    def test_traversal_filter(self):
+        data = [None, False, True, 0, 1, 0.0, 1.1, '', 'str', {}, {0: 0}, [], [1]]
+
+        assert traverse_obj(data, [..., filter]) == [True, 1, 1.1, 'str', {0: 0}, [1]], \
+            '`filter` should filter falsy values'
+
+
+class TestTraversalHelpers:
+    def test_traversal_require(self):
+        with pytest.raises(ExtractorError):
+            traverse_obj(_TEST_DATA, ['None', {require('value')}])
+        assert traverse_obj(_TEST_DATA, ['str', {require('value')}]) == 'str', \
+            '`require` should pass through non `None` values'
+
+    def test_subs_list_to_dict(self):
+        assert traverse_obj([
+            {'name': 'de', 'url': 'https://example.com/subs/de.vtt'},
+            {'name': 'en', 'url': 'https://example.com/subs/en1.ass'},
+            {'name': 'en', 'url': 'https://example.com/subs/en2.ass'},
+        ], [..., {
+            'id': 'name',
+            'url': 'url',
+        }, all, {subs_list_to_dict}]) == {
+            'de': [{'url': 'https://example.com/subs/de.vtt'}],
+            'en': [
+                {'url': 'https://example.com/subs/en1.ass'},
+                {'url': 'https://example.com/subs/en2.ass'},
+            ],
+        }, 'function should build subtitle dict from list of subtitles'
+        assert traverse_obj([
+            {'name': 'de', 'url': 'https://example.com/subs/de.ass'},
+            {'name': 'de'},
+            {'name': 'en', 'content': 'content'},
+            {'url': 'https://example.com/subs/en'},
+        ], [..., {
+            'id': 'name',
+            'data': 'content',
+            'url': 'url',
+        }, all, {subs_list_to_dict(lang=None)}]) == {
+            'de': [{'url': 'https://example.com/subs/de.ass'}],
+            'en': [{'data': 'content'}],
+        }, 'subs with mandatory items missing should be filtered'
+        assert traverse_obj([
+            {'url': 'https://example.com/subs/de.ass', 'name': 'de'},
+            {'url': 'https://example.com/subs/en', 'name': 'en'},
+        ], [..., {
+            'id': 'name',
+            'ext': ['url', {determine_ext(default_ext=None)}],
+            'url': 'url',
+        }, all, {subs_list_to_dict(ext='ext')}]) == {
+            'de': [{'url': 'https://example.com/subs/de.ass', 'ext': 'ass'}],
+            'en': [{'url': 'https://example.com/subs/en', 'ext': 'ext'}],
+        }, '`ext` should set default ext but leave existing value untouched'
+        assert traverse_obj([
+            {'name': 'en', 'url': 'https://example.com/subs/en2', 'prio': True},
+            {'name': 'en', 'url': 'https://example.com/subs/en1', 'prio': False},
+        ], [..., {
+            'id': 'name',
+            'quality': ['prio', {int}],
+            'url': 'url',
+        }, all, {subs_list_to_dict(ext='ext')}]) == {'en': [
+            {'url': 'https://example.com/subs/en1', 'ext': 'ext'},
+            {'url': 'https://example.com/subs/en2', 'ext': 'ext'},
+        ]}, '`quality` key should sort subtitle list accordingly'
+        assert traverse_obj([
+            {'name': 'de', 'url': 'https://example.com/subs/de.ass'},
+            {'name': 'de'},
+            {'name': 'en', 'content': 'content'},
+            {'url': 'https://example.com/subs/en'},
+        ], [..., {
+            'id': 'name',
+            'url': 'url',
+            'data': 'content',
+        }, all, {subs_list_to_dict(lang='en')}]) == {
+            'de': [{'url': 'https://example.com/subs/de.ass'}],
+            'en': [
+                {'data': 'content'},
+                {'url': 'https://example.com/subs/en'},
+            ],
+        }, 'optionally provided lang should be used if no id available'
+        assert traverse_obj([
+            {'name': 1, 'url': 'https://example.com/subs/de1'},
+            {'name': {}, 'url': 'https://example.com/subs/de2'},
+            {'name': 'de', 'ext': 1, 'url': 'https://example.com/subs/de3'},
+            {'name': 'de', 'ext': {}, 'url': 'https://example.com/subs/de4'},
+        ], [..., {
+            'id': 'name',
+            'url': 'url',
+            'ext': 'ext',
+        }, all, {subs_list_to_dict(lang=None)}]) == {
+            'de': [
+                {'url': 'https://example.com/subs/de3'},
+                {'url': 'https://example.com/subs/de4'},
+            ],
+        }, 'non str types should be ignored for id and ext'
+        assert traverse_obj([
+            {'name': 1, 'url': 'https://example.com/subs/de1'},
+            {'name': {}, 'url': 'https://example.com/subs/de2'},
+            {'name': 'de', 'ext': 1, 'url': 'https://example.com/subs/de3'},
+            {'name': 'de', 'ext': {}, 'url': 'https://example.com/subs/de4'},
+        ], [..., {
+            'id': 'name',
+            'url': 'url',
+            'ext': 'ext',
+        }, all, {subs_list_to_dict(lang='de')}]) == {
+            'de': [
+                {'url': 'https://example.com/subs/de1'},
+                {'url': 'https://example.com/subs/de2'},
+                {'url': 'https://example.com/subs/de3'},
+                {'url': 'https://example.com/subs/de4'},
+            ],
+        }, 'non str types should be replaced by default id'
+
+    def test_trim_str(self):
+        with pytest.raises(TypeError):
+            trim_str('positional')
+
+        assert callable(trim_str(start='a'))
+        assert trim_str(start='ab')('abc') == 'c'
+        assert trim_str(end='bc')('abc') == 'a'
+        assert trim_str(start='a', end='c')('abc') == 'b'
+        assert trim_str(start='ab', end='c')('abc') == ''
+        assert trim_str(start='a', end='bc')('abc') == ''
+        assert trim_str(start='ab', end='bc')('abc') == ''
+        assert trim_str(start='abc', end='abc')('abc') == ''
+        assert trim_str(start='', end='')('abc') == 'abc'
+
+    def test_unpack(self):
+        assert unpack(lambda *x: ''.join(map(str, x)))([1, 2, 3]) == '123'
+        assert unpack(join_nonempty)([1, 2, 3]) == '1-2-3'
+        assert unpack(join_nonempty, delim=' ')([1, 2, 3]) == '1 2 3'
+        with pytest.raises(TypeError):
+            unpack(join_nonempty)()
+        with pytest.raises(TypeError):
+            unpack()
+
+    def test_find_element(self):
+        for improper_kwargs in [
+            dict(attr='data-id'),
+            dict(value='y'),
+            dict(attr='data-id', value='y', cls='a'),
+            dict(attr='data-id', value='y', id='x'),
+            dict(cls='a', id='x'),
+            dict(cls='a', tag='p'),
+            dict(cls='[ab]', regex=True),
+        ]:
+            with pytest.raises(AssertionError):
+                find_element(**improper_kwargs)(_TEST_HTML)
+
+        assert find_element(cls='a')(_TEST_HTML) == '1'
+        assert find_element(cls='a', html=True)(_TEST_HTML) == '<div class="a">1</div>'
+        assert find_element(id='x')(_TEST_HTML) == '2'
+        assert find_element(id='[ex]')(_TEST_HTML) is None
+        assert find_element(id='[ex]', regex=True)(_TEST_HTML) == '2'
+        assert find_element(id='x', html=True)(_TEST_HTML) == '<div class="a" id="x" custom="z">2</div>'
+        assert find_element(attr='data-id', value='y')(_TEST_HTML) == '3'
+        assert find_element(attr='data-id', value='y(?:es)?')(_TEST_HTML) is None
+        assert find_element(attr='data-id', value='y(?:es)?', regex=True)(_TEST_HTML) == '3'
+        assert find_element(
+            attr='data-id', value='y', html=True)(_TEST_HTML) == '<div class="b" data-id="y" custom="z">3</div>'
+
+    def test_find_elements(self):
+        for improper_kwargs in [
+            dict(tag='p'),
+            dict(attr='data-id'),
+            dict(value='y'),
+            dict(attr='data-id', value='y', cls='a'),
+            dict(cls='a', tag='div'),
+            dict(cls='[ab]', regex=True),
+        ]:
+            with pytest.raises(AssertionError):
+                find_elements(**improper_kwargs)(_TEST_HTML)
+
+        assert find_elements(cls='a')(_TEST_HTML) == ['1', '2', '4']
+        assert find_elements(cls='a', html=True)(_TEST_HTML) == [
+            '<div class="a">1</div>', '<div class="a" id="x" custom="z">2</div>', '<p class="a">4</p>']
+        assert find_elements(attr='custom', value='z')(_TEST_HTML) == ['2', '3']
+        assert find_elements(attr='custom', value='[ez]')(_TEST_HTML) == []
+        assert find_elements(attr='custom', value='[ez]', regex=True)(_TEST_HTML) == ['2', '3', '5']
+
 
 class TestDictGet:
     def test_dict_get(self):

@@ -9,7 +9,7 @@
 
 
 from test.helper import FakeYDL, report_warning
-from yt_dlp.update import UpdateInfo, Updater
+from yt_dlp.update import UpdateInfo, Updater, UPDATE_SOURCES, _make_label
 
 
 # XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES

@@ -82,16 +82,36 @@
 lock 2022.08.18.36 .+ Python 3\.6
 lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
 lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
+lock 2024.10.22 py2exe .+
+lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+lock 2024.10.22 zip Python 3\.8
+lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+lock 2025.08.11 darwin_legacy_exe .+
 '''
 
 TEST_LOCKFILE_V2_TMPL = r'''%s
 lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
 lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
 lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
+lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
+lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
+lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
 lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
 lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
+lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
+lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
+lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
 lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
 lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
+lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
+lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
+lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
 '''
 
 TEST_LOCKFILE_V2 = TEST_LOCKFILE_V2_TMPL % TEST_LOCKFILE_COMMENT

@@ -145,43 +165,80 @@ def test(lockfile, identifier, input_tag, expect_tag, exact=False, repo='yt-dlp/
         for lockfile in (TEST_LOCKFILE_V1, TEST_LOCKFILE_V2, TEST_LOCKFILE_ACTUAL, TEST_LOCKFILE_FORK):
             # Normal operation
             test(lockfile, 'zip Python 3.12.0', '2023.12.31', '2023.12.31')
-            test(lockfile, 'zip stable Python 3.12.0', '2023.12.31', '2023.12.31', exact=True)
-            # Python 3.6 --update should update only to its lock
+            test(lockfile, 'zip Python 3.12.0', '2023.12.31', '2023.12.31', exact=True)
+            # py2exe should never update beyond 2024.10.22
+            test(lockfile, 'py2exe Python 3.8', '2025.01.01', '2024.10.22')
+            test(lockfile, 'py2exe Python 3.8', '2025.01.01', None, exact=True)
+            # Python 3.6 --update should update only to the py3.6 lock
             test(lockfile, 'zip Python 3.6.0', '2023.11.16', '2022.08.18.36')
-            # --update-to an exact version later than the lock should return None
-            test(lockfile, 'zip stable Python 3.6.0', '2023.11.16', None, exact=True)
-            # Python 3.7 should be able to update to its lock
+            # Python 3.6 --update-to an exact version later than the py3.6 lock should return None
+            test(lockfile, 'zip Python 3.6.0', '2023.11.16', None, exact=True)
+            # Python 3.7 should be able to update to the py3.7 lock
             test(lockfile, 'zip Python 3.7.0', '2023.11.16', '2023.11.16')
-            test(lockfile, 'zip stable Python 3.7.1', '2023.11.16', '2023.11.16', exact=True)
-            # Non-win_x86_exe builds on py3.7 must be locked
+            test(lockfile, 'zip Python 3.7.1', '2023.11.16', '2023.11.16', exact=True)
+            # Non-win_x86_exe builds on py3.7 must be locked at py3.7 lock
             test(lockfile, 'zip Python 3.7.1', '2023.12.31', '2023.11.16')
-            test(lockfile, 'zip stable Python 3.7.1', '2023.12.31', None, exact=True)
-            test(  # Windows Vista w/ win_x86_exe must be locked
-                lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
+            test(lockfile, 'zip Python 3.7.1', '2023.12.31', None, exact=True)
+            # Python 3.8 should only update to the py3.8 lock
+            test(lockfile, 'zip Python 3.8.10', '2025.01.01', '2024.10.22')
+            test(lockfile, 'zip Python 3.8.110', '2025.01.01', None, exact=True)
+            test(  # Windows Vista w/ win_x86_exe must be locked at Vista lock
+                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
                 '2023.12.31', '2023.11.16')
-            test(  # Windows 2008Server w/ win_x86_exe must be locked
+            test(  # Windows 2008Server w/ win_x86_exe must be locked at Vista lock
                 lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-2008Server',
                 '2023.12.31', None, exact=True)
-            test(  # Windows 7 w/ win_x86_exe py3.7 build should be able to update beyond lock
-                lockfile, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
-                '2023.12.31', '2023.12.31')
-            test(  # Windows 8.1 w/ '2008Server' in platform string should be able to update beyond lock
+            test(  # Windows 7 w/ win_x86_exe py3.7 build should be able to update beyond py3.7 lock
+                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
+                '2023.12.31', '2023.12.31', exact=True)
+            test(  # Windows 7 win_x86_exe should only update to Win7 lock
+                lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
+                '2025.01.01', '2024.10.22')
+            test(  # Windows 2008ServerR2 win_exe should only update to Win7 lock
+                lockfile, 'win_exe Python 3.8.10 (CPython x86 32bit) - Windows-2008ServerR2',
+                '2025.12.31', '2024.10.22')
+            test(  # Windows 8.1 w/ '2008Server' in platform string should be able to update beyond py3.7 lock
                 lockfile, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-post2008Server-6.2.9200',
                 '2023.12.31', '2023.12.31', exact=True)
+            test(  # win_exe built w/Python 3.8 on Windows>=8 should be able to update beyond py3.8 lock
+                lockfile, 'win_exe Python 3.8.10 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0',
+                '2025.01.01', '2025.01.01', exact=True)
+            test(  # linux_armv7l_exe w/glibc2.7 should only update to glibc<2.31 lock
+                lockfile, 'linux_armv7l_exe Python 3.8.0 (CPython armv7l 32bit) - Linux-6.5.0-1025-azure-armv7l-with-glibc2.7',
+                '2025.01.01', '2024.10.22')
+            test(  # linux_armv7l_exe w/Python 3.8 and glibc>=2.31 should be able to update beyond py3.8 and glibc<2.31 locks
+                lockfile, 'linux_armv7l_exe Python 3.8.0 (CPython armv7l 32bit) - Linux-6.5.0-1025-azure-armv7l-with-glibc2.31',
+                '2025.01.01', '2025.01.01')
+            test(  # linux_armv7l_exe w/glibc2.30 should only update to glibc<2.31 lock
+                lockfile, 'linux_armv7l_exe Python 3.8.0 (CPython armv7l 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.30 (OpenSSL',
+                '2025.01.01', '2024.10.22')
+            test(  # linux_aarch64_exe w/glibc2.17 should only update to glibc<2.31 lock
+                lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.17',
+                '2025.01.01', '2024.10.22')
+            test(  # linux_aarch64_exe w/glibc2.40 and glibc>=2.31 should be able to update beyond py3.8 and glibc<2.31 locks
+                lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.40',
+                '2025.01.01', '2025.01.01')
+            test(  # linux_aarch64_exe w/glibc2.3 should only update to glibc<2.31 lock
+                lockfile, 'linux_aarch64_exe Python 3.8.0 (CPython aarch64 64bit) - Linux-6.5.0-1025-azure-aarch64-with-glibc2.3 (OpenSSL',
+                '2025.01.01', '2024.10.22')
+            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.11', '2025.08.11')
+            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.11', '2025.08.11', exact=True)
+            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.12', '2025.08.11')
+            test(lockfile, 'darwin_legacy_exe Python 3.10.5', '2025.08.12', None, exact=True)
 
         # Forks can block updates to non-numeric tags rather than lock
         test(TEST_LOCKFILE_FORK, 'zip Python 3.6.3', 'pr0000', None, repo='fork/yt-dlp')
-        test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr0000', 'pr0000', repo='fork/yt-dlp')
-        test(TEST_LOCKFILE_FORK, 'zip stable Python 3.7.4', 'pr1234', None, repo='fork/yt-dlp')
+        test(TEST_LOCKFILE_FORK, 'zip Python 3.7.4', 'pr0000', 'pr0000', repo='fork/yt-dlp')
+        test(TEST_LOCKFILE_FORK, 'zip Python 3.7.4', 'pr1234', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_FORK, 'zip Python 3.8.1', 'pr1234', 'pr1234', repo='fork/yt-dlp', exact=True)
         test(
-            TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
+            TEST_LOCKFILE_FORK, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-Vista-6.0.6003-SP2',
             'pr1234', None, repo='fork/yt-dlp')
         test(
-            TEST_LOCKFILE_FORK, 'win_x86_exe stable Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
+            TEST_LOCKFILE_FORK, 'win_x86_exe Python 3.7.9 (CPython x86 32bit) - Windows-7-6.1.7601-SP1',
             '2023.12.31', '2023.12.31', repo='fork/yt-dlp')
         test(TEST_LOCKFILE_FORK, 'zip Python 3.11.2', 'pr9999', None, repo='fork/yt-dlp', exact=True)
-        test(TEST_LOCKFILE_FORK, 'zip stable Python 3.12.0', 'pr9999', 'pr9999', repo='fork/yt-dlp')
+        test(TEST_LOCKFILE_FORK, 'zip Python 3.12.0', 'pr9999', 'pr9999', repo='fork/yt-dlp')
 
     def test_query_update(self):
         ydl = FakeYDL()

@@ -223,6 +280,26 @@ def test(target, expected, current_version=None, current_commit=None, identifier
         test('testing', None, current_commit='9' * 40)
         test('testing', UpdateInfo('testing', commit='9' * 40))
 
+    def test_make_label(self):
+        STABLE_REPO = UPDATE_SOURCES['stable']
+        NIGHTLY_REPO = UPDATE_SOURCES['nightly']
+        MASTER_REPO = UPDATE_SOURCES['master']
+
+        for inputs, expected in [
+            ([STABLE_REPO, '2025.09.02', '2025.09.02'], f'stable@2025.09.02 from {STABLE_REPO}'),
+            ([NIGHTLY_REPO, '2025.09.02.123456', '2025.09.02.123456'], f'nightly@2025.09.02.123456 from {NIGHTLY_REPO}'),
+            ([MASTER_REPO, '2025.09.02.987654', '2025.09.02.987654'], f'master@2025.09.02.987654 from {MASTER_REPO}'),
+            (['fork/yt-dlp', 'experimental', '2025.12.31.000000'], 'fork/yt-dlp@experimental build 2025.12.31.000000'),
+            (['fork/yt-dlp', '2025.09.02', '2025.09.02'], 'fork/yt-dlp@2025.09.02'),
+            ([STABLE_REPO, 'experimental', '2025.12.31.000000'], f'{STABLE_REPO}@experimental build 2025.12.31.000000'),
+            ([STABLE_REPO, 'experimental'], f'{STABLE_REPO}@experimental'),
+            (['fork/yt-dlp', 'experimental'], 'fork/yt-dlp@experimental'),
+        ]:
+            result = _make_label(*inputs)
+            self.assertEqual(
+                result, expected,
+                f'{inputs!r} returned {result!r} instead of {expected!r}')
+
 
 if __name__ == '__main__':
     unittest.main()

@@ -3,24 +3,26 @@
 # Allow direct execution
 import os
 import sys
-import unittest
-import warnings
-import datetime as dt
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
 import contextlib
+import datetime as dt
 import io
 import itertools
 import json
+import ntpath
+import pickle
 import subprocess
+import unittest
+import unittest.mock
+import warnings
 import xml.etree.ElementTree
 
 from yt_dlp.compat import (
     compat_etree_fromstring,
     compat_HTMLParseError,
-    compat_os_name,
 )
 from yt_dlp.utils import (
     Config,

@@ -48,7 +50,6 @@
     dfxp2srt,
     encode_base_n,
     encode_compat_str,
-    encodeFilename,
     expand_path,
     extract_attributes,
     extract_basic_auth,

@@ -68,10 +69,11 @@
     get_elements_html_by_class,
     get_elements_text_and_html_by_attribute,
     int_or_none,
-    intlist_to_bytes,
     iri_to_uri,
     is_html,
     js_to_json,
+    jwt_decode_hs256,
+    jwt_encode,
     limit_length,
     locked_file,
     lowercase_escape,

@@ -100,11 +102,13 @@
     remove_start,
     render_table,
     replace_extension,
+    datetime_round,
     rot47,
     sanitize_filename,
     sanitize_path,
     sanitize_url,
     shell_quote,
+    strftime_or_none,
     smuggle_url,
     str_to_int,
     strip_jsonp,

@@ -220,10 +224,8 @@ def test_sanitize_ids(self):
         self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
         self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')
 
+    @unittest.mock.patch('sys.platform', 'win32')
     def test_sanitize_path(self):
-        if sys.platform != 'win32':
-            return
-
         self.assertEqual(sanitize_path('abc'), 'abc')
         self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
         self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')

@@ -250,11 +252,26 @@ def test_sanitize_path(self):
         self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
         self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
         self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
+        self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')
 
-        self.assertEqual(sanitize_path('../abc'), '..\\abc')
-        self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
-        self.assertEqual(sanitize_path('./abc'), 'abc')
-        self.assertEqual(sanitize_path('./../abc'), '..\\abc')
+        for test, expected in [
+            ('C:\\', 'C:\\'),
+            ('../abc', '..\\abc'),
+            ('../../abc', '..\\..\\abc'),
+            ('./abc', 'abc'),
+            ('./../abc', '..\\abc'),
+            ('\\abc', '\\abc'),
+            ('C:abc', 'C:abc'),
+            ('C:abc\\..\\', 'C:'),
+            ('C:abc\\..\\def\\..\\..\\', 'C:..'),
+            ('C:\\abc\\xyz///..\\def\\', 'C:\\abc\\def'),
+            ('abc/../', '.'),
+            ('./abc/../', '.'),
+        ]:
+            result = sanitize_path(test)
+            assert result == expected, f'{test} was incorrectly resolved'
+            assert result == sanitize_path(result), f'{test} changed after sanitizing again'
+            assert result == ntpath.normpath(test), f'{test} does not match ntpath.normpath'
 
     def test_sanitize_url(self):
         self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')

@@ -337,11 +354,13 @@ def test_remove_start(self):
         self.assertEqual(remove_start(None, 'A - '), None)
         self.assertEqual(remove_start('A - B', 'A - '), 'B')
         self.assertEqual(remove_start('B - A', 'A - '), 'B - A')
+        self.assertEqual(remove_start('non-empty', ''), 'non-empty')
 
     def test_remove_end(self):
         self.assertEqual(remove_end(None, ' - B'), None)
         self.assertEqual(remove_end('A - B', ' - B'), 'A')
         self.assertEqual(remove_end('B - A', ' - B'), 'B - A')
+        self.assertEqual(remove_end('non-empty', ''), 'non-empty')
 
     def test_remove_quotes(self):
         self.assertEqual(remove_quotes(None), None)

@@ -386,6 +405,25 @@ def test_datetime_from_str(self):
         self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
         self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
 
+    def test_datetime_round(self):
+        self.assertEqual(datetime_round(dt.datetime.strptime('1820-05-12T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ')),
+                         dt.datetime(1820, 5, 12, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('1969-12-31T23:34:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'hour'),
+                         dt.datetime(1970, 1, 1, 0, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'minute'),
+                         dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.123Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
+                         dt.datetime(2024, 12, 25, 1, 23, 45, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.678Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
+                         dt.datetime(2024, 12, 25, 1, 23, 46, tzinfo=dt.timezone.utc))
+
+    def test_strftime_or_none(self):
+        self.assertEqual(strftime_or_none(-4722192000), '18200512')
+        self.assertEqual(strftime_or_none(0), '19700101')
+        self.assertEqual(strftime_or_none(1735084800), '20241225')
+        # Throws OverflowError
+        self.assertEqual(strftime_or_none(1735084800000), None)
+
     def test_daterange(self):
         _20century = DateRange('19000101', '20000101')
         self.assertFalse('17890714' in _20century)

@@ -444,6 +482,8 @@ def test_unified_timestamps(self):
         self.assertEqual(unified_timestamp('Sep 11, 2013 | 5:49 AM'), 1378878540)
         self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
         self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)
+        self.assertEqual(unified_timestamp('Sunday, 26 Nov 2006, 19:00'), 1164567600)
+        self.assertEqual(unified_timestamp('wed, aug 16, 2008, 12:00pm'), 1218931200)
 
         self.assertEqual(unified_timestamp('December 31 1969 20:00:01 EDT'), 1)
         self.assertEqual(unified_timestamp('Wednesday 31 December 1969 18:01:26 MDT'), 86)

@@ -555,10 +595,10 @@ def test_smuggle_url(self):
         self.assertEqual(res_data, {'a': 'b', 'c': 'd'})
 
     def test_shell_quote(self):
-        args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
+        args = ['ffmpeg', '-i', 'ñ€ß\'.mp4']
         self.assertEqual(
             shell_quote(args),
-            """ffmpeg -i 'ñ€ß'"'"'.mp4'""" if compat_os_name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')
+            """ffmpeg -i 'ñ€ß'"'"'.mp4'""" if os.name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')
 
     def test_float_or_none(self):
         self.assertEqual(float_or_none('42.42'), 42.42)

@@ -636,6 +676,8 @@ def test_url_or_none(self):
         self.assertEqual(url_or_none('mms://foo.de'), 'mms://foo.de')
         self.assertEqual(url_or_none('rtspu://foo.de'), 'rtspu://foo.de')
         self.assertEqual(url_or_none('ftps://foo.de'), 'ftps://foo.de')
+        self.assertEqual(url_or_none('ws://foo.de'), 'ws://foo.de')
+        self.assertEqual(url_or_none('wss://foo.de'), 'wss://foo.de')
 
     def test_parse_age_limit(self):
         self.assertEqual(parse_age_limit(None), None)

@@ -919,6 +961,11 @@ def test_parse_codecs(self):
             'acodec': 'none',
             'dynamic_range': 'HDR10',
         })
+        self.assertEqual(parse_codecs('vp09.02.50.10.01.09.18.09.00'), {
+            'vcodec': 'vp09.02.50.10.01.09.18.09.00',
+            'acodec': 'none',
+            'dynamic_range': 'HDR10',
+        })
         self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
             'vcodec': 'av01.0.12M.10.0.110.09.16.09.0',
             'acodec': 'none',

@@ -1232,6 +1279,7 @@ def test_js_to_json_edgecases(self):
     def test_js_to_json_malformed(self):
         self.assertEqual(js_to_json('42a1'), '42"a1"')
         self.assertEqual(js_to_json('42a-1'), '42"a"-1')
+        self.assertEqual(js_to_json('{a: `${e("")}`}'), '{"a": "\\"e\\"(\\"\\")"}')
 
     def test_js_to_json_template_literal(self):
         self.assertEqual(js_to_json('`Hello ${name}`', {'name': '"world"'}), '"Hello world"')

@@ -1293,15 +1341,10 @@ def test_clean_html(self):
         self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
         self.assertEqual(clean_html('a<br>\xa0b'), 'a\nb')
 
-    def test_intlist_to_bytes(self):
-        self.assertEqual(
-            intlist_to_bytes([0, 1, 127, 128, 255]),
-            b'\x00\x01\x7f\x80\xff')
-
     def test_args_to_str(self):
         self.assertEqual(
             args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
-            'foo ba/r -baz \'2 be\' \'\'' if compat_os_name != 'nt' else 'foo ba/r -baz "2 be" ""',
+            'foo ba/r -baz \'2 be\' \'\'' if os.name != 'nt' else 'foo ba/r -baz "2 be" ""',
         )
 
     def test_parse_filesize(self):

@@ -1347,6 +1390,7 @@ def test_parse_resolution(self):
         self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
         self.assertEqual(parse_resolution('ep1x2'), {})
         self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
+        self.assertEqual(parse_resolution('1920w', lenient=True), {'width': 1920})
 
     def test_parse_bitrate(self):
         self.assertEqual(parse_bitrate(None), None)

@@ -1819,7 +1863,7 @@ def test_get_elements_text_and_html_by_attribute(self):
 
         self.assertEqual(
             list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
-            list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
+            list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES, strict=True)))
         self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
         self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])

@@ -2060,21 +2104,26 @@ def test_http_header_dict(self):
         headers = HTTPHeaderDict()
         headers['ytdl-test'] = b'0'
         self.assertEqual(list(headers.items()), [('Ytdl-Test', '0')])
+        self.assertEqual(list(headers.sensitive().items()), [('ytdl-test', '0')])
         headers['ytdl-test'] = 1
         self.assertEqual(list(headers.items()), [('Ytdl-Test', '1')])
+        self.assertEqual(list(headers.sensitive().items()), [('ytdl-test', '1')])
         headers['Ytdl-test'] = '2'
         self.assertEqual(list(headers.items()), [('Ytdl-Test', '2')])
+        self.assertEqual(list(headers.sensitive().items()), [('Ytdl-test', '2')])
         self.assertTrue('ytDl-Test' in headers)
         self.assertEqual(str(headers), str(dict(headers)))
         self.assertEqual(repr(headers), str(dict(headers)))
 
         headers.update({'X-dlp': 'data'})
         self.assertEqual(set(headers.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data')})
+        self.assertEqual(set(headers.sensitive().items()), {('Ytdl-test', '2'), ('X-dlp', 'data')})
         self.assertEqual(dict(headers), {'Ytdl-Test': '2', 'X-Dlp': 'data'})
         self.assertEqual(len(headers), 2)
         self.assertEqual(headers.copy(), headers)
-        headers2 = HTTPHeaderDict({'X-dlp': 'data3'}, **headers, **{'X-dlp': 'data2'})
+        headers2 = HTTPHeaderDict({'X-dlp': 'data3'}, headers, **{'X-dlP': 'data2'})
         self.assertEqual(set(headers2.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data2')})
+        self.assertEqual(set(headers2.sensitive().items()), {('Ytdl-test', '2'), ('X-dlP', 'data2')})
         self.assertEqual(len(headers2), 2)
         headers2.clear()
         self.assertEqual(len(headers2), 0)

@@ -2082,16 +2131,23 @@ def test_http_header_dict(self):
         # ensure we prefer latter headers
         headers3 = HTTPHeaderDict({'Ytdl-TeSt': 1}, {'Ytdl-test': 2})
         self.assertEqual(set(headers3.items()), {('Ytdl-Test', '2')})
+        self.assertEqual(set(headers3.sensitive().items()), {('Ytdl-test', '2')})
         del headers3['ytdl-tesT']
         self.assertEqual(dict(headers3), {})
 
         headers4 = HTTPHeaderDict({'ytdl-test': 'data;'})
         self.assertEqual(set(headers4.items()), {('Ytdl-Test', 'data;')})
+        self.assertEqual(set(headers4.sensitive().items()), {('ytdl-test', 'data;')})
 
         # common mistake: strip whitespace from values
         # https://github.com/yt-dlp/yt-dlp/issues/8729
         headers5 = HTTPHeaderDict({'ytdl-test': ' data; '})
         self.assertEqual(set(headers5.items()), {('Ytdl-Test', 'data;')})
+        self.assertEqual(set(headers5.sensitive().items()), {('ytdl-test', 'data;')})
+
+        # test if picklable
+        headers6 = HTTPHeaderDict(a=1, b=2)
+        self.assertEqual(pickle.loads(pickle.dumps(headers6)), headers6)
 
     def test_extract_basic_auth(self):
         assert extract_basic_auth('http://:foo.bar') == ('http://:foo.bar', None)

@@ -2101,7 +2157,7 @@ def test_extract_basic_auth(self):
         assert extract_basic_auth('http://user:@foo.bar') == ('http://foo.bar', 'Basic dXNlcjo=')
         assert extract_basic_auth('http://user:pass@foo.bar') == ('http://foo.bar', 'Basic dXNlcjpwYXNz')
 
-    @unittest.skipUnless(compat_os_name == 'nt', 'Only relevant on Windows')
+    @unittest.skipUnless(os.name == 'nt', 'Only relevant on Windows')
     def test_windows_escaping(self):
         tests = [
             'test"&',

@@ -2135,6 +2191,47 @@ def run_shell(args):
             assert run_shell(args) == expected
             assert run_shell(shell_quote(args, shell=True)) == expected
 
+    def test_partial_application(self):
+        assert callable(int_or_none(scale=10)), 'missing positional parameter should apply partially'
+        assert int_or_none(10, scale=0.1) == 100, 'positionally passed argument should call function'
+        assert int_or_none(v=10) == 10, 'keyword passed positional should call function'
+        assert int_or_none(scale=0.1)(10) == 100, 'call after partial application should call the function'
+
+    _JWT_KEY = '12345678'
+    _JWT_HEADERS_1 = {'a': 'b'}
+    _JWT_HEADERS_2 = {'typ': 'JWT', 'alg': 'HS256'}
+    _JWT_HEADERS_3 = {'typ': 'JWT', 'alg': 'RS256'}
+    _JWT_HEADERS_4 = {'c': 'd', 'alg': 'ES256'}
+    _JWT_DECODED = {
+        'foo': 'bar',
+        'qux': 'baz',
+    }
+    _JWT_SIMPLE = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.fKojvTWqnjNTbsdoDTmYNc4tgYAG3h_SWRzM77iLH0U'
+    _JWT_WITH_EXTRA_HEADERS = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImEiOiJiIn0.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.Ia91-B77yasfYM7jsB6iVKLew-3rO6ITjNmjWUVXCvQ'
+    _JWT_WITH_REORDERED_HEADERS = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.slg-7COta5VOfB36p3tqV4MGPV6TTA_ouGnD48UEVq4'
+    _JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.XWp496oVgQnoits0OOocutdjxoaQwn4GUWWxUsKENPM'
+    _JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG = 'eyJhbGciOiJFUzI1NiIsInR5cCI6IkpXVCIsImMiOiJkIn0.eyJmb28iOiJiYXIiLCJxdXgiOiJiYXoifQ.oM_tc7IkfrwkoRh43rFFE1wOi3J3mQGwx7_lMyKQqDg'
+
+    def test_jwt_encode(self):
+        def test(expected, headers={}):
+            self.assertEqual(jwt_encode(self._JWT_DECODED, self._JWT_KEY, headers=headers), expected)
+
+        test(self._JWT_SIMPLE)
+        test(self._JWT_WITH_EXTRA_HEADERS, headers=self._JWT_HEADERS_1)
+        test(self._JWT_WITH_REORDERED_HEADERS, headers=self._JWT_HEADERS_2)
+        test(self._JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG, headers=self._JWT_HEADERS_3)
+        test(self._JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG, headers=self._JWT_HEADERS_4)
+
+    def test_jwt_decode_hs256(self):
+        def test(inp):
+            self.assertEqual(jwt_decode_hs256(inp), self._JWT_DECODED)
+
+        test(self._JWT_SIMPLE)
+        test(self._JWT_WITH_EXTRA_HEADERS)
+        test(self._JWT_WITH_REORDERED_HEADERS)
+        test(self._JWT_WITH_REORDERED_HEADERS_AND_RS256_ALG)
+        test(self._JWT_WITH_EXTRA_HEADERS_AND_ES256_ALG)
+
 
 if __name__ == '__main__':
     unittest.main()

@@ -22,7 +22,7 @@ def test_private_info_arg(self):
             '--username', 'johnsmith@gmail.com',
             '--password', 'my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'--username' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'--password' in serr)

@@ -36,7 +36,7 @@ def test_private_info_shortarg(self):
             '-u', 'johnsmith@gmail.com',
             '-p', 'my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'-u' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'-p' in serr)

@@ -50,7 +50,7 @@ def test_private_info_eq(self):
             '--username=johnsmith@gmail.com',
             '--password=my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'--username' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'--password' in serr)

@@ -64,7 +64,7 @@ def test_private_info_shortarg_eq(self):
             '-u=johnsmith@gmail.com',
             '-p=my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'-u' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'-p' in serr)
Some files were not shown because too many files have changed in this diff.