Mirror of https://github.com/yt-dlp/yt-dlp, synced 2025-12-16 22:25:40 +07:00

Compare commits: 2024.05.26 ... cb78440e46
1055 Commits
Commit range (newest first): cb78440e46 … 26603d0b34
.github/FUNDING.yml (2 changes, vendored)

@@ -10,4 +10,4 @@ liberapay: # Replace with a single Liberapay username
 issuehunt: # Replace with a single IssueHunt username
 otechie: # Replace with a single Otechie username
 
-custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators']
+custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers']
.github/ISSUE_TEMPLATE/1_broken_site.yml (31 changes, vendored)

@@ -2,13 +2,11 @@ name: Broken site support
 description: Report issue with yt-dlp on a supported site
 labels: [triage, site-bug]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -24,9 +22,9 @@ body:
           required: true
        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
@@ -47,6 +45,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -63,14 +63,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
         Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/2_site_support_request.yml (vendored)

@@ -2,13 +2,11 @@ name: Site support request
 description: Request support for a new site
 labels: [triage, site-request]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -24,9 +22,9 @@ body:
           required: true
         - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
@@ -59,6 +57,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -75,14 +75,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
         Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/3_site_feature_request.yml (vendored)

@@ -1,14 +1,12 @@
 name: Site feature request
-description: Request a new functionality for a supported site
+description: Request new functionality for a site supported by yt-dlp
 labels: [triage, site-enhancement]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -22,9 +20,9 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
@@ -55,6 +53,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -71,14 +71,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
         Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/4_bug_report.yml (35 changes, vendored)

@@ -2,13 +2,11 @@ name: Core bug report
 description: Report a bug unrelated to any particular site or extractor
 labels: [triage, bug]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -20,13 +18,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
-          required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
@@ -40,6 +34,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
@@ -56,14 +52,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
         Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/5_feature_request.yml (33 changes, vendored)

@@ -1,14 +1,12 @@
 name: Feature request
-description: Request a new functionality unrelated to any particular site or extractor
+description: Request a new feature unrelated to any particular site or extractor
 labels: [triage, enhancement]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:
@@ -22,9 +20,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
@@ -38,6 +36,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
         - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
@@ -52,14 +52,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
         Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/6_question.yml (33 changes, vendored)

@@ -1,14 +1,12 @@
 name: Ask question
-description: Ask yt-dlp related question
+description: Ask a question about using yt-dlp
 labels: [question]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: markdown
     attributes:
       value: |
@@ -28,9 +26,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: question
@@ -44,6 +42,8 @@ body:
     id: verbose
    attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
         - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
@@ -58,14 +58,15 @@ body:
       placeholder: |
         [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
-        [debug] Request Handlers: urllib, requests
-        [debug] Loaded 1893 extractors
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
         Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
         yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
         [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
.github/ISSUE_TEMPLATE/config.yml (7 changes, vendored)

@@ -1,8 +1,5 @@
 blank_issues_enabled: false
 contact_links:
-  - name: Get help from the community on Discord
+  - name: Get help on Discord
     url: https://discord.gg/H5MNcFW63r
-    about: Join the yt-dlp Discord for community-powered support!
-  - name: Matrix Bridge to the Discord server
-    url: https://matrix.to/#/#yt-dlp:matrix.org
-    about: For those who do not want to use Discord
+    about: Join the yt-dlp Discord server for support and discussion
.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml (vendored)

@@ -18,9 +18,9 @@ body:
           required: true
         - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
.github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml (vendored)

@@ -18,9 +18,9 @@ body:
           required: true
         - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
.github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml (vendored)

@@ -1,5 +1,5 @@
 name: Site feature request
-description: Request a new functionality for a supported site
+description: Request new functionality for a site supported by yt-dlp
 labels: [triage, site-enhancement]
 body:
   %(no_skip)s
@@ -16,9 +16,9 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (8 changes, vendored)

@@ -14,13 +14,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
-          required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
.github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml (vendored)

@@ -1,5 +1,5 @@
 name: Feature request
-description: Request a new functionality unrelated to any particular site or extractor
+description: Request a new feature unrelated to any particular site or extractor
 labels: [triage, enhancement]
 body:
   %(no_skip)s
@@ -16,9 +16,9 @@ body:
           required: true
         - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
           required: true
   - type: textarea
     id: description
6 changes: .github/ISSUE_TEMPLATE_tmpl/6_question.yml
@@ -1,5 +1,5 @@
name: Ask question
description: Ask yt-dlp related question
description: Ask a question about using yt-dlp
labels: [question]
body:
%(no_skip)s
@@ -22,9 +22,9 @@ body:
required: true
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
required: true
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
required: true
- type: textarea
id: question
38 changes: .github/PULL_REQUEST_TEMPLATE.md
@@ -1,14 +1,17 @@
**IMPORTANT**: PRs without the template will be CLOSED
<!--
**IMPORTANT**: PRs without the template will be CLOSED

Due to the high volume of pull requests, it may be a while before your PR is reviewed.
Please try to keep your pull request focused on a single bugfix or new feature.
Pull requests with a vast scope and/or very large diff will take much longer to review.
It is recommended for new contributors to stick to smaller pull requests, so you can receive much more immediate feedback as you familiarize yourself with the codebase.

PLEASE AVOID FORCE-PUSHING after opening a PR, as it makes reviewing more difficult.
-->

### Description of your *pull request* and other information

<!--

Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible

-->

ADD DESCRIPTION HERE
ADD DETAILED DESCRIPTION HERE

Fixes #

@@ -16,24 +19,23 @@ ### Description of your *pull request* and other information
<details open><summary>Template</summary> <!-- OPEN is intentional -->

<!--
# PLEASE FOLLOW THE GUIDE BELOW

# PLEASE FOLLOW THE GUIDE BELOW

- You will be asked some questions, please read them **carefully** and answer honestly
- Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
- Use *Preview* tab to see how your *pull request* will actually look like

- You will be asked some questions, please read them **carefully** and answer honestly
- Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
- Use *Preview* tab to see what your *pull request* will actually look like
-->

### Before submitting a *pull request* make sure you have:
- [ ] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions)
- [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests

### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply:
- [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
- [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
- [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
- [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
- [ ] I have read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated

### What is the purpose of your *pull request*?
### What is the purpose of your *pull request*? Check those that apply and remove the others:
- [ ] Fix or improvement to an extractor (Make sure to add/update tests)
- [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy))
- [ ] Core bug fix/improvement
28 changes: .github/actionlint.yml (new file)
@@ -0,0 +1,28 @@
self-hosted-runner:
labels:
# Workaround for the outdated runner list in actionlint v1.7.7
# Ref: https://github.com/rhysd/actionlint/issues/533
- windows-11-arm

config-variables:
- KEEP_CACHE_WARM
- PUSH_VERSION_COMMIT
- UPDATE_TO_VERIFICATION
- PYPI_PROJECT
- PYPI_SUFFIX
- NIGHTLY_PYPI_PROJECT
- NIGHTLY_PYPI_SUFFIX
- NIGHTLY_ARCHIVE_REPO
- BUILD_NIGHTLY
- MASTER_PYPI_PROJECT
- MASTER_PYPI_SUFFIX
- MASTER_ARCHIVE_REPO
- BUILD_MASTER
- ISSUE_LOCKDOWN
- SANITIZE_COMMENT

paths:
.github/workflows/build.yml:
ignore:
# SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
- '.+SC1090.+'
659 changes: .github/workflows/build.yml
@@ -9,31 +9,27 @@ on:
required: false
default: stable
type: string
origin:
required: true
type: string
unix:
default: true
type: boolean
linux_static:
linux:
default: true
type: boolean
linux_arm:
linux_armv7l:
default: true
type: boolean
musllinux:
default: true
type: boolean
macos:
default: true
type: boolean
macos_legacy:
default: true
type: boolean
windows:
default: true
type: boolean
windows32:
default: true
type: boolean
origin:
required: false
default: ''
type: string
secrets:
GPG_SIGNING_KEY:
required: false
@@ -43,7 +39,9 @@ on:
version:
description: |
VERSION: yyyy.mm.dd[.rev] or rev
required: true
(default: auto-generated)
required: false
default: ''
type: string
channel:
description: |
@@ -55,37 +53,26 @@ on:
description: yt-dlp, yt-dlp.tar.gz
default: true
type: boolean
linux_static:
description: yt-dlp_linux
linux:
description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
default: true
type: boolean
linux_arm:
description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
linux_armv7l:
description: yt-dlp_linux_armv7l.zip
default: true
type: boolean
musllinux:
description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
default: true
type: boolean
macos:
description: yt-dlp_macos, yt-dlp_macos.zip
default: true
type: boolean
macos_legacy:
description: yt-dlp_macos_legacy
default: true
type: boolean
windows:
description: yt-dlp.exe, yt-dlp_min.exe, yt-dlp_win.zip
description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
default: true
type: boolean
windows32:
description: yt-dlp_x86.exe
default: true
type: boolean
origin:
description: Origin
required: false
default: 'current repo'
type: choice
options:
- 'current repo'

permissions:
contents: read
@@ -94,44 +81,153 @@ jobs:
process:
runs-on: ubuntu-latest
outputs:
origin: ${{ steps.process_origin.outputs.origin }}
origin: ${{ steps.process_inputs.outputs.origin }}
timestamp: ${{ steps.process_inputs.outputs.timestamp }}
version: ${{ steps.process_inputs.outputs.version }}
linux_matrix: ${{ steps.linux_matrix.outputs.matrix }}

steps:
- name: Process origin
id: process_origin
- name: Process inputs
id: process_inputs
env:
INPUTS: ${{ toJSON(inputs) }}
REPOSITORY: ${{ github.repository }}
shell: python
run: |
echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
import datetime as dt
import json
import os
import re
INPUTS = json.loads(os.environ['INPUTS'])
timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
version = INPUTS.get('version')
if version and '.' not in version:
# build.yml was dispatched with only a revision as the version input value
version_parts = [*timestamp.split('.')[:3], version]
elif not version:
# build.yml was dispatched without any version input value, so include .HHMMSS revision
version_parts = timestamp.split('.')[:4]
else:
# build.yml was called or dispatched with a complete version input value
version_parts = version.split('.')
assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
outputs = {
'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
'timestamp': timestamp,
'version': '.'.join(version_parts),
}
print(json.dumps(outputs, indent=2))
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
    f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
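As a rough illustration of the version logic in this step (a standalone sketch, not the workflow's own code): the timestamp format %Y.%m.%d.%H%M%S.%f yields five dot-separated numeric parts, and the three dispatch cases reduce to:

    import datetime as dt

    def resolve_version(version, now):
        # Mirrors the "Process inputs" step above: a bare revision is appended
        # to today's date, an empty input gets an HHMMSS revision, and a
        # complete version is passed through unchanged
        timestamp = now.strftime('%Y.%m.%d.%H%M%S.%f')
        if version and '.' not in version:
            return '.'.join([*timestamp.split('.')[:3], version])
        if not version:
            return '.'.join(timestamp.split('.')[:4])
        return version

    now = dt.datetime(2025, 10, 14, 23, 28, 45, tzinfo=dt.timezone.utc)
    assert resolve_version('3', now) == '2025.10.14.3'
    assert resolve_version('', now) == '2025.10.14.232845'
    assert resolve_version('2025.10.14', now) == '2025.10.14'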

- name: Build Linux matrix
id: linux_matrix
env:
INPUTS: ${{ toJSON(inputs) }}
PYTHON_VERSION: '3.13'
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
shell: python
run: |
import json
import os
EXE_MAP = {
'linux': [{
'os': 'linux',
'arch': 'x86_64',
'runner': 'ubuntu-24.04',
}, {
'os': 'linux',
'arch': 'aarch64',
'runner': 'ubuntu-24.04-arm',
}],
'linux_armv7l': [{
'os': 'linux',
'arch': 'armv7l',
'runner': 'ubuntu-24.04-arm',
'qemu_platform': 'linux/arm/v7',
'onefile': False,
'cache_requirements': True,
'update_to': 'yt-dlp/yt-dlp@2023.03.04',
}],
'musllinux': [{
'os': 'musllinux',
'arch': 'x86_64',
'runner': 'ubuntu-24.04',
'python_version': '3.14',
}, {
'os': 'musllinux',
'arch': 'aarch64',
'runner': 'ubuntu-24.04-arm',
'python_version': '3.14',
}],
}
INPUTS = json.loads(os.environ['INPUTS'])
matrix = [exe for key, group in EXE_MAP.items() for exe in group if INPUTS.get(key)]
if not matrix:
# If we send an empty matrix when no linux inputs are given, the entire workflow fails
matrix = [EXE_MAP['linux'][0]]
for exe in matrix:
exe['exe'] = '_'.join(filter(None, (
'yt-dlp',
exe['os'],
exe['arch'] != 'x86_64' and exe['arch'],
)))
exe.setdefault('qemu_platform', None)
exe.setdefault('onefile', True)
exe.setdefault('onedir', True)
exe.setdefault('cache_requirements', False)
exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
exe.setdefault('update_to', os.environ['UPDATE_TO'])
if not any(INPUTS.get(key) for key in EXE_MAP):
print('skipping linux job')
else:
print(json.dumps(matrix, indent=2))
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
    f.write(f'matrix={json.dumps(matrix)}')
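The executable-name rule above drops the default x86_64 architecture from the name; a quick sketch of the same join, for illustration only:

    def exe_name(os_name, arch):
        # x86_64 is the implied default, so it is filtered out of the name
        return '_'.join(filter(None, ('yt-dlp', os_name, arch != 'x86_64' and arch)))

    assert exe_name('linux', 'x86_64') == 'yt-dlp_linux'
    assert exe_name('linux', 'aarch64') == 'yt-dlp_linux_aarch64'
    assert exe_name('musllinux', 'x86_64') == 'yt-dlp_musllinux'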

unix:
needs: process
if: inputs.unix
runs-on: ubuntu-latest
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0 # Needed for changelog
- uses: actions/setup-python@v5

- uses: actions/setup-python@v6
with:
python-version: "3.10"

- name: Install Requirements
run: |
sudo apt -y install zip pandoc man sed

- name: Prepare
run: |
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
python devscripts/update_changelog.py -vv
python devscripts/make_lazy_extractors.py

- name: Build Unix platform-independent binary
run: |
make all tar
make all-extra tar

- name: Verify --update-to
if: vars.UPDATE_TO_VERIFICATION
run: |
chmod +x ./yt-dlp
cp ./yt-dlp ./yt-dlp_downgraded
version="$(./yt-dlp --version)"
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
downgraded_version="$(./yt-dlp_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]
[[ "${version}" != "${downgraded_version}" ]]

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
@@ -141,137 +237,153 @@ jobs:
yt-dlp.tar.gz
compression-level: 0

linux_static:
linux:
name: ${{ matrix.os }} (${{ matrix.arch }})
if: inputs.linux || inputs.linux_armv7l || inputs.musllinux
needs: process
if: inputs.linux_static
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build static executable
env:
channel: ${{ inputs.channel }}
origin: ${{ needs.process.outputs.origin }}
version: ${{ inputs.version }}
run: |
mkdir ~/build
cd bundle/docker
docker compose up --build static
sudo chown "${USER}:docker" ~/build/yt-dlp_linux
- name: Verify --update-to
if: vars.UPDATE_TO_VERIFICATION
run: |
chmod +x ~/build/yt-dlp_linux
cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
version="$(~/build/yt-dlp_linux --version)"
~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-${{ github.job }}
path: |
~/build/yt-dlp_linux
compression-level: 0

linux_arm:
needs: process
if: inputs.linux_arm
permissions:
contents: read
packages: write # for creating cache
runs-on: ubuntu-latest
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
architecture:
- armv7
- aarch64
include: ${{ fromJSON(needs.process.outputs.linux_matrix) }}
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
EXE_NAME: ${{ matrix.exe }}
PYTHON_VERSION: ${{ matrix.python_version }}
UPDATE_TO: ${{ (vars.UPDATE_TO_VERIFICATION && matrix.update_to) || '' }}
SKIP_ONEDIR_BUILD: ${{ (!matrix.onedir && '1') || '' }}
SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}

steps:
- uses: actions/checkout@v4
with:
path: ./repo
- name: Virtualized Install, Prepare & Build
uses: yt-dlp/run-on-arch-action@v2
with:
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
env: |
GITHUB_WORKFLOW: build
githubToken: ${{ github.token }} # To cache image
arch: ${{ matrix.architecture }}
distro: ubuntu18.04 # Standalone executable should be built on minimum supported OS
dockerRunArgs: --volume "${PWD}/repo:/repo"
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
apt update
apt -y install zlib1g-dev libffi-dev python3.8 python3.8-dev python3.8-distutils python3-pip
python3.8 -m pip install -U pip setuptools wheel
# Cannot access any files from the repo directory at this stage
python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi secretstorage cffi
- uses: actions/checkout@v5

run: |
cd repo
python3.8 devscripts/install_deps.py -o --include build
python3.8 devscripts/install_deps.py --include pyinstaller --include secretstorage # Cached version may be out of date
python3.8 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python3.8 devscripts/make_lazy_extractors.py
python3.8 -m bundle.pyinstaller
- name: Cache requirements
if: matrix.cache_requirements
id: cache-venv
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
with:
path: |
venv
key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
restore-keys: |
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-

if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
chmod +x ./dist/yt-dlp_linux_${arch}
cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
version="$(./dist/yt-dlp_linux_${arch} --version)"
./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]
fi
- name: Set up QEMU
if: matrix.qemu_platform
uses: docker/setup-qemu-action@v3
with:
platforms: ${{ matrix.qemu_platform }}

- name: Build executable
env:
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}
run: |
mkdir -p ./venv
mkdir -p ./dist
pushd bundle/docker
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
popd
if [[ -z "${SKIP_ONEFILE_BUILD}" ]]; then
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
fi

- name: Verify executable in container
env:
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}_verify
run: |
cd bundle/docker
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-linux_${{ matrix.architecture }}
path: | # run-on-arch-action designates armv7l as armv7
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
path: |
dist/${{ matrix.exe }}*
compression-level: 0
macos:
needs: process
if: inputs.macos
runs-on: macos-11
permissions:
contents: read
runs-on: macos-14
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
# NB: Building universal2 does not work with python from actions/setup-python

- name: Cache requirements
id: cache-venv
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
with:
path: |
~/yt-dlp-build-venv
key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
restore-keys: |
cache-reqs-${{ github.job }}-${{ github.ref }}-
cache-reqs-${{ github.job }}-

- name: Install Requirements
run: |
brew install coreutils
python3 devscripts/install_deps.py --user -o --include build
python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
# We need to use system Python in order to roll our own universal2 curl_cffi wheel
brew uninstall --ignore-dependencies python3
python3 -m venv ~/yt-dlp-build-venv
source ~/yt-dlp-build-venv/bin/activate
python3 devscripts/install_deps.py --only-optional-groups --include-group build
python3 devscripts/install_deps.py --print --include-group pyinstaller > requirements.txt
# We need to ignore wheels otherwise we break universal2 builds
python3 -m pip install -U --user --no-binary :all: -r requirements.txt
python3 -m pip install -U --no-binary :all: -r requirements.txt
# We need to fuse our own universal2 wheels for curl_cffi
python3 -m pip install -U --user delocate
python3 -m pip install -U 'delocate==0.11.0'
mkdir curl_cffi_whls curl_cffi_universal2
python3 devscripts/install_deps.py --print -o --include curl-cffi > requirements.txt
python3 devscripts/install_deps.py --print --only-optional-groups --include-group curl-cffi > requirements.txt
for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
python3 -m pip download \
--only-binary=:all: \
--platform "${platform}" \
--pre -d curl_cffi_whls \
-d curl_cffi_whls \
-r requirements.txt
done
( # Overwrite x86_64-only libs with fat/universal2 libs or else Pyinstaller will do the opposite
# See https://github.com/yt-dlp/yt-dlp/pull/10069
cd curl_cffi_whls
mkdir -p curl_cffi/.dylibs
python_libdir=$(python3 -c 'import sys; from pathlib import Path; print(Path(sys.path[1]).parent)')
for dylib in lib{ssl,crypto}.3.dylib; do
cp "${python_libdir}/${dylib}" "curl_cffi/.dylibs/${dylib}"
for wheel in curl_cffi*macos*x86_64.whl; do
zip "${wheel}" "curl_cffi/.dylibs/${dylib}"
done
done
)
python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/curl_cffi*.whl -w curl_cffi_universal2
python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/cffi*.whl -w curl_cffi_universal2
cd curl_cffi_universal2
for wheel in *cffi*.whl; do mv -n -- "${wheel}" "${wheel/x86_64/universal2}"; done
python3 -m pip install -U --user *cffi*.whl
for wheel in curl_cffi_universal2/*cffi*.whl; do
mv -n -- "${wheel}" "${wheel/x86_64/universal2}"
done
python3 -m pip install --force-reinstall -U curl_cffi_universal2/*cffi*.whl

- name: Prepare
run: |
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
python3 devscripts/make_lazy_extractors.py
- name: Build
run: |
source ~/yt-dlp-build-venv/bin/activate
python3 -m bundle.pyinstaller --target-architecture universal2 --onedir
(cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
python3 -m bundle.pyinstaller --target-architecture universal2
@@ -282,7 +394,7 @@ jobs:
chmod +x ./dist/yt-dlp_macos
cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
version="$(./dist/yt-dlp_macos --version)"
./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]

@@ -295,161 +407,124 @@ jobs:
dist/yt-dlp_macos.zip
compression-level: 0

macos_legacy:
needs: process
if: inputs.macos_legacy
runs-on: macos-12

steps:
- uses: actions/checkout@v4
- name: Install Python
# We need the official Python, because the GA ones only support newer macOS versions
env:
PYTHON_VERSION: 3.10.5
MACOSX_DEPLOYMENT_TARGET: 10.9 # Used up by the Python build tools
run: |
# Hack to get the latest patch version. Uncomment if needed
#brew install python@3.10
#export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o "python.pkg"
sudo installer -pkg python.pkg -target /
python3 --version
- name: Install Requirements
run: |
brew install coreutils
python3 devscripts/install_deps.py --user -o --include build
python3 devscripts/install_deps.py --user --include pyinstaller

- name: Prepare
run: |
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python3 devscripts/make_lazy_extractors.py
- name: Build
run: |
python3 -m bundle.pyinstaller
mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy

- name: Verify --update-to
if: vars.UPDATE_TO_VERIFICATION
run: |
chmod +x ./dist/yt-dlp_macos_legacy
cp ./dist/yt-dlp_macos_legacy ./dist/yt-dlp_macos_legacy_downgraded
version="$(./dist/yt-dlp_macos_legacy --version)"
./dist/yt-dlp_macos_legacy_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$(./dist/yt-dlp_macos_legacy_downgraded --version)"
[[ "$version" != "$downgraded_version" ]]

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-${{ github.job }}
path: |
dist/yt-dlp_macos_legacy
compression-level: 0

windows:
name: windows (${{ matrix.arch }})
needs: process
if: inputs.windows
runs-on: windows-latest
permissions:
contents: read
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
include:
- arch: 'x64'
runner: windows-2025
python_version: '3.10'
platform_tag: win_amd64
pyi_version: '6.16.0'
pyi_tag: '2025.09.13.221251'
pyi_hash: b6496c7630c3afe66900cfa824e8234a8c2e2c81704bd7facd79586abc76c0e5
- arch: 'x86'
runner: windows-2025
python_version: '3.10'
platform_tag: win32
pyi_version: '6.16.0'
pyi_tag: '2025.09.13.221251'
pyi_hash: 2d881843580efdc54f3523507fc6d9c5b6051ee49c743a6d9b7003ac5758c226
- arch: 'arm64'
runner: windows-11-arm
python_version: '3.13' # arm64 only has Python >= 3.11 available
platform_tag: win_arm64
pyi_version: '6.16.0'
pyi_tag: '2025.09.13.221251'
pyi_hash: 4250c9085e34a95c898f3ee2f764914fc36ec59f0d97c28e6a75fcf21f7b144f
env:
CHANNEL: ${{ inputs.channel }}
ORIGIN: ${{ needs.process.outputs.origin }}
VERSION: ${{ needs.process.outputs.version }}
SUFFIX: ${{ (matrix.arch != 'x64' && format('_{0}', matrix.arch)) || '' }}
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
PYI_REPO: https://github.com/yt-dlp/Pyinstaller-Builds
PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl

steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with: # 3.8 is used for Win7 support
python-version: "3.8"
- uses: actions/checkout@v5
- uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python_version }}
architecture: ${{ matrix.arch }}

- name: Cache requirements
id: cache-venv
if: matrix.arch == 'arm64'
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
with:
path: |
/yt-dlp-build-venv
key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
restore-keys: |
${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
${{ env.BASE_CACHE_KEY }}-

- name: Install Requirements
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
python devscripts/install_deps.py -o --include build
python devscripts/install_deps.py --include curl-cffi
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl"
env:
ARCH: ${{ matrix.arch }}
PYI_URL: ${{ env.PYI_REPO }}/releases/download/${{ matrix.pyi_tag }}/${{ env.PYI_WHEEL }}
PYI_HASH: ${{ matrix.pyi_hash }}
shell: pwsh
run: |
python -m venv /yt-dlp-build-venv
/yt-dlp-build-venv/Scripts/Activate.ps1
python -m pip install -U pip
# Install custom PyInstaller build and verify hash
mkdir /pyi-wheels
python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}"
python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}"
python devscripts/install_deps.py --only-optional-groups --include-group build
if ("${Env:ARCH}" -eq "x86") {
python devscripts/install_deps.py
} else {
python devscripts/install_deps.py --include-group curl-cffi
}
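The hash-pinned install above relies on pip's direct-URL plus #sha256= syntax, which makes pip refuse a wheel whose digest does not match. A rough standalone equivalent of that verification in plain Python (the URL, hash, and path here are illustrative, not taken from the workflow):

    import hashlib
    import urllib.request

    def verify_wheel(url, expected_sha256, dest):
        # Download the wheel, then refuse to use it unless its digest matches
        urllib.request.urlretrieve(url, dest)
        with open(dest, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        if digest != expected_sha256:
            raise RuntimeError(f'hash mismatch for {dest}: {digest}')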
- name: Prepare
shell: pwsh
run: |
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
python devscripts/make_lazy_extractors.py

- name: Build
shell: pwsh
run: |
/yt-dlp-build-venv/Scripts/Activate.ps1
python -m bundle.pyinstaller
python -m bundle.pyinstaller --onedir
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_real.exe
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip

- name: Install Requirements (py2exe)
run: |
python devscripts/install_deps.py --include py2exe
- name: Build (py2exe)
run: |
python -m bundle.py2exe
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
Move-Item ./dist/yt-dlp_real.exe ./dist/yt-dlp.exe
Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip

- name: Verify --update-to
if: vars.UPDATE_TO_VERIFICATION
shell: pwsh
run: |
foreach ($name in @("yt-dlp","yt-dlp_min")) {
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
$version = & "./dist/${name}.exe" --version
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
if ($version -eq $downgraded_version) {
exit 1
}
$name = "yt-dlp${Env:SUFFIX}"
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
$version = & "./dist/${name}.exe" --version
& "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
if ($version -eq $downgraded_version) {
exit 1
}

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-${{ github.job }}
name: build-bin-${{ github.job }}-${{ matrix.arch }}
path: |
dist/yt-dlp.exe
dist/yt-dlp_min.exe
dist/yt-dlp_win.zip
compression-level: 0

windows32:
needs: process
if: inputs.windows32
runs-on: windows-latest

steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.8"
architecture: "x86"
- name: Install Requirements
run: |
python devscripts/install_deps.py -o --include build
python devscripts/install_deps.py
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.8.0-py3-none-any.whl"

- name: Prepare
run: |
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
python devscripts/make_lazy_extractors.py
- name: Build
run: |
python -m bundle.pyinstaller

- name: Verify --update-to
if: vars.UPDATE_TO_VERIFICATION
run: |
foreach ($name in @("yt-dlp_x86")) {
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
$version = & "./dist/${name}.exe" --version
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
if ($version -eq $downgraded_version) {
exit 1
}
}

- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: build-bin-${{ github.job }}
path: |
dist/yt-dlp_x86.exe
dist/yt-dlp${{ env.SUFFIX }}.exe
dist/yt-dlp_win${{ env.SUFFIX }}.zip
compression-level: 0
meta_files:
@@ -457,15 +532,13 @@ jobs:
needs:
- process
- unix
- linux_static
- linux_arm
- linux
- macos
- macos_legacy
- windows
- windows32
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v4
- name: Download artifacts
uses: actions/download-artifact@v5
with:
path: artifact
pattern: build-bin-*
@@ -475,29 +548,57 @@ jobs:
run: |
cd ./artifact/
# make sure SHA sums are also printed to stdout
sha256sum * | tee ../SHA2-256SUMS
sha512sum * | tee ../SHA2-512SUMS
sha256sum -- * | tee ../SHA2-256SUMS
sha512sum -- * | tee ../SHA2-512SUMS
# also print as permanent annotations to the summary page
while read -r shasum; do
echo "::notice title=${shasum##* }::sha256: ${shasum% *}"
done < ../SHA2-256SUMS
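The ::notice:: loop above splits each sha256sum line around its last space to recover the filename and digest. An equivalent sketch in Python, assuming (as in this artifact directory) that filenames contain no spaces:

    def notices(checksum_file):
        # Each line is "<hex digest>  <filename>"; emit one workflow
        # annotation command per artifact
        with open(checksum_file) as f:
            for line in f:
                digest, _, name = line.strip().partition(' ')
                yield f'::notice title={name.strip()}::sha256: {digest}'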
- name: Make Update spec
run: |
cat >> _update_spec << EOF
# This file is used for regulating self-update
lock 2022.08.18.36 .+ Python 3\.6
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
lock 2023.11.16 zip Python 3\.7
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
lock 2024.10.22 py2exe .+
lock 2024.10.22 zip Python 3\.8
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lock 2025.08.11 darwin_legacy_exe .+
lock 2025.08.27 linux_armv7l_exe .+
lock 2025.10.14 zip Python 3\.9
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp 2023.11.16 zip Python 3\.7
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
lockV2 yt-dlp/yt-dlp 2025.10.14 zip Python 3\.9
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 zip Python 3\.7
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.10.14.232845 zip Python 3\.9
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 zip Python 3\.7
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
lockV2 yt-dlp/yt-dlp-master-builds 2025.10.14.232330 zip Python 3\.9
EOF
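Each lock line pins environments matching its regex to a maximum self-update version (e.g. Python 3.6 installs can never update past 2022.08.18.36). The real matching lives in yt-dlp's updater; as a loose illustration only, with precedence rules assumed:

    import re

    def max_allowed_version(spec_lines, variant_and_system):
        # variant_and_system is something like 'zip Python 3.7 Linux-...'
        for line in spec_lines:
            if line.startswith('lock '):
                _, version, pattern = line.split(' ', 2)
                if re.match(pattern, variant_and_system):
                    return version  # first matching lock wins in this sketch
        return None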
- name: Sign checksum files
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
if: env.GPG_SIGNING_KEY != ''
if: env.GPG_SIGNING_KEY
run: |
gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
for signfile in ./SHA*SUMS; do
23 changes: .github/workflows/cache-warmer.yml (new file)
@@ -0,0 +1,23 @@
name: Keep cache warm
on:
workflow_dispatch:
schedule:
- cron: '0 22 1,6,11,16,21,27 * *'

jobs:
build:
if: |
vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
uses: ./.github/workflows/build.yml
with:
version: '999999'
channel: stable
origin: ${{ github.repository }}
unix: false
linux: false
linux_armv7l: true
musllinux: false
macos: true
windows: true
permissions:
contents: read
77 changes: .github/workflows/challenge-tests.yml (new file)
@@ -0,0 +1,77 @@
name: Challenge Tests
on:
push:
paths:
- .github/workflows/challenge-tests.yml
- test/test_jsc/*.py
- yt_dlp/extractor/youtube/jsc/**.js
- yt_dlp/extractor/youtube/jsc/**.py
- yt_dlp/extractor/youtube/pot/**.py
- yt_dlp/utils/_jsruntime.py
pull_request:
paths:
- .github/workflows/challenge-tests.yml
- test/test_jsc/*.py
- yt_dlp/extractor/youtube/jsc/**.js
- yt_dlp/extractor/youtube/jsc/**.py
- yt_dlp/extractor/youtube/pot/**.py
- yt_dlp/utils/_jsruntime.py
permissions:
contents: read

concurrency:
group: challenge-tests-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
tests:
name: Challenge Tests
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest]
python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11]
env:
QJS_VERSION: '2025-04-26' # Earliest version with rope strings
steps:
- uses: actions/checkout@v5
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install Deno
uses: denoland/setup-deno@v2
with:
deno-version: '2.0.0' # minimum supported version
- name: Install Bun
uses: oven-sh/setup-bun@v2
with:
# minimum supported version is 1.0.31 but earliest available Windows version is 1.1.0
bun-version: ${{ (matrix.os == 'windows-latest' && '1.1.0') || '1.0.31' }}
- name: Install Node
uses: actions/setup-node@v6
with:
node-version: '20.0' # minimum supported version
- name: Install QuickJS (Linux)
if: matrix.os == 'ubuntu-latest'
run: |
wget "https://bellard.org/quickjs/binary_releases/quickjs-linux-x86_64-${QJS_VERSION}.zip" -O quickjs.zip
unzip quickjs.zip qjs
sudo install qjs /usr/local/bin/qjs
- name: Install QuickJS (Windows)
if: matrix.os == 'windows-latest'
shell: pwsh
run: |
Invoke-WebRequest "https://bellard.org/quickjs/binary_releases/quickjs-win-x86_64-${Env:QJS_VERSION}.zip" -OutFile quickjs.zip
unzip quickjs.zip
- name: Install test requirements
run: |
python ./devscripts/install_deps.py --print --only-optional-groups --include-group test > requirements.txt
python ./devscripts/install_deps.py --print -c certifi -c requests -c urllib3 -c yt-dlp-ejs >> requirements.txt
python -m pip install -U -r requirements.txt
- name: Run tests
timeout-minutes: 15
run: |
python -m yt_dlp -v --js-runtimes node --js-runtimes bun --js-runtimes quickjs || true
python ./devscripts/run_tests.py test/test_jsc -k download
8 changes: .github/workflows/codeql.yml
@@ -29,11 +29,11 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -47,7 +47,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
uses: github/codeql-action/autobuild@v3

# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -60,6 +60,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"
50 changes: .github/workflows/core.yml
@@ -6,7 +6,8 @@ on:
- devscripts/**
- test/**
- yt_dlp/**.py
- '!yt_dlp/extractor/*.py'
- '!yt_dlp/extractor/**.py'
- yt_dlp/extractor/youtube/**.py
- yt_dlp/extractor/__init__.py
- yt_dlp/extractor/common.py
- yt_dlp/extractor/extractors.py
@@ -16,7 +17,8 @@ on:
- devscripts/**
- test/**
- yt_dlp/**.py
- '!yt_dlp/extractor/*.py'
- '!yt_dlp/extractor/**.py'
- yt_dlp/extractor/youtube/**.py
- yt_dlp/extractor/__init__.py
- yt_dlp/extractor/common.py
- yt_dlp/extractor/extractors.py
@@ -36,26 +38,54 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
# CPython 3.8 is in quick-test
python-version: ['3.9', '3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
# CPython 3.10 is in quick-test
python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
include:
# at least one of each CPython/PyPy tests must be in windows
- os: windows-latest
python-version: '3.8'
python-version: '3.10'
- os: windows-latest
python-version: '3.11'
- os: windows-latest
python-version: '3.12'
- os: windows-latest
python-version: pypy-3.9
python-version: '3.13'
- os: windows-latest
python-version: '3.14'
- os: windows-latest
python-version: pypy-3.11
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install test requirements
run: python3 ./devscripts/install_deps.py --include test --include curl-cffi
run: python ./devscripts/install_deps.py --include-group test --include-group curl-cffi
- name: Run tests
timeout-minutes: 15
continue-on-error: False
env:
source: ${{ (github.event_name == 'push' && github.event.before) || 'origin/master' }}
target: ${{ (github.event_name == 'push' && github.event.after) || 'HEAD' }}
shell: bash
run: |
flags=()
# Check if a networking file is involved
patterns="\
^yt_dlp/networking/
^yt_dlp/utils/networking\.py$
^test/test_http_proxy\.py$
^test/test_networking\.py$
^test/test_networking_utils\.py$
^test/test_socks\.py$
^test/test_websockets\.py$
^pyproject\.toml$
"
if git diff --name-only "${source}" "${target}" | grep -Ef <(printf '%s' "${patterns}"); then
flags+=(--flaky)
fi
python3 -m yt_dlp -v || true # Print debug head
python3 ./devscripts/run_tests.py core
python3 -m devscripts.run_tests "${flags[@]}" --pytest-args '--reruns 2 --reruns-delay 3.0' core
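The grep-based check above just asks whether any networking-related file changed between the two commits; a sketch of the same decision in Python (the pattern list is abridged here, the workflow names a few more test files):

    import re
    import subprocess

    NETWORKING_PATTERNS = [
        r'^yt_dlp/networking/',
        r'^yt_dlp/utils/networking\.py$',
        r'^test/test_socks\.py$',
        r'^pyproject\.toml$',
    ]

    def needs_flaky_flag(source, target):
        # Same idea as the grep: did any networking-related file change?
        changed = subprocess.run(
            ['git', 'diff', '--name-only', source, target],
            capture_output=True, text=True, check=True,
        ).stdout.splitlines()
        return any(re.search(p, name) for p in NETWORKING_PATTERNS for name in changed)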
24 changes: .github/workflows/download.yml
@@ -9,16 +9,16 @@ jobs:
if: "contains(github.event.head_commit.message, 'ci run dl')"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: 3.9
python-version: '3.10'
- name: Install test requirements
run: python3 ./devscripts/install_deps.py --include dev
run: python ./devscripts/install_deps.py --include-group dev
- name: Run tests
continue-on-error: true
run: python3 ./devscripts/run_tests.py download
run: python ./devscripts/run_tests.py download

full:
name: Full Download Tests
@@ -28,21 +28,21 @@ jobs:
fail-fast: true
matrix:
os: [ubuntu-latest]
python-version: ['3.10', '3.11', '3.12', pypy-3.8, pypy-3.10]
python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
include:
# at least one of each CPython/PyPy tests must be in windows
- os: windows-latest
python-version: '3.8'
python-version: '3.10'
- os: windows-latest
python-version: pypy-3.9
python-version: pypy-3.11
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install test requirements
run: python3 ./devscripts/install_deps.py --include dev
run: python ./devscripts/install_deps.py --include-group dev
- name: Run tests
continue-on-error: true
run: python3 ./devscripts/run_tests.py download
run: python ./devscripts/run_tests.py download
21 changes: .github/workflows/issue-lockdown.yml (new file)
@@ -0,0 +1,21 @@
name: Issue Lockdown
on:
issues:
types: [opened]

permissions:
issues: write

jobs:
lockdown:
name: Issue Lockdown
if: vars.ISSUE_LOCKDOWN
runs-on: ubuntu-latest
steps:
- name: "Lock new issue"
env:
GH_TOKEN: ${{ github.token }}
ISSUE_NUMBER: ${{ github.event.issue.number }}
REPOSITORY: ${{ github.repository }}
run: |
gh issue lock "${ISSUE_NUMBER}" -R "${REPOSITORY}"
25 changes: .github/workflows/quick-test.yml
@@ -9,31 +9,34 @@ jobs:
if: "!contains(github.event.head_commit.message, 'ci skip all')"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.8
uses: actions/setup-python@v5
- uses: actions/checkout@v5
- name: Set up Python 3.10
uses: actions/setup-python@v6
with:
python-version: '3.8'
python-version: '3.10'
- name: Install test requirements
run: python3 ./devscripts/install_deps.py --include test
run: python ./devscripts/install_deps.py --only-optional-groups --include-group test
- name: Run tests
timeout-minutes: 15
run: |
python3 -m yt_dlp -v || true
python3 ./devscripts/run_tests.py core
python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core
check:
name: Code check
if: "!contains(github.event.head_commit.message, 'ci skip all')"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/checkout@v5
- uses: actions/setup-python@v6
with:
python-version: '3.8'
python-version: '3.10'
- name: Install dev dependencies
run: python3 ./devscripts/install_deps.py -o --include static-analysis
run: python ./devscripts/install_deps.py --only-optional-groups --include-group static-analysis
- name: Make lazy extractors
run: python3 ./devscripts/make_lazy_extractors.py
run: python ./devscripts/make_lazy_extractors.py
- name: Run ruff
run: ruff check --output-format github .
- name: Run autopep8
run: autopep8 --diff .
- name: Check file mode
run: git ls-files --format="%(objectmode) %(path)" yt_dlp/ | ( ! grep -v "^100644" )
27 changes: .github/workflows/release-master.yml
@@ -6,10 +6,12 @@ on:
paths:
- "yt_dlp/**.py"
- "!yt_dlp/version.py"
- "bundle/*.py"
- "bundle/**"
- "pyproject.toml"
- "Makefile"
- ".github/workflows/build.yml"
- ".github/workflows/release.yml"
- ".github/workflows/release-master.yml"
concurrency:
group: release-master
permissions:
@@ -17,13 +19,30 @@ permissions:

jobs:
release:
if: vars.BUILD_MASTER != ''
if: vars.BUILD_MASTER
uses: ./.github/workflows/release.yml
with:
prerelease: true
source: master
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
target: 'master'
permissions:
contents: write
packages: write
id-token: write # mandatory for trusted publishing
secrets: inherit

publish_pypi:
needs: [release]
if: vars.MASTER_PYPI_PROJECT
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v5
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true
29 changes: .github/workflows/release-nightly.yml
@@ -7,12 +7,12 @@ permissions:

jobs:
check_nightly:
if: vars.BUILD_NIGHTLY != ''
if: vars.BUILD_NIGHTLY
runs-on: ubuntu-latest
outputs:
commit: ${{ steps.check_for_new_commits.outputs.commit }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Check for new commits
@@ -22,9 +22,13 @@ jobs:
"yt_dlp/*.py"
':!yt_dlp/version.py'
"bundle/*.py"
"bundle/docker/compose.yml"
"bundle/docker/linux/*"
"pyproject.toml"
"Makefile"
".github/workflows/build.yml"
".github/workflows/release.yml"
".github/workflows/release-nightly.yml"
)
echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

@@ -34,9 +38,26 @@ jobs:
uses: ./.github/workflows/release.yml
with:
prerelease: true
source: nightly
source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
target: 'nightly'
permissions:
contents: write
packages: write
id-token: write # mandatory for trusted publishing
secrets: inherit

publish_pypi:
needs: [release]
if: vars.NIGHTLY_PYPI_PROJECT
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v5
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true
307 changes: .github/workflows/release.yml
@@ -2,10 +2,6 @@ name: Release
on:
workflow_call:
inputs:
prerelease:
required: false
default: true
type: boolean
source:
required: false
default: ''
@@ -18,6 +14,14 @@ on:
required: false
default: ''
type: string
linux_armv7l:
required: false
default: false
type: boolean
prerelease:
required: false
default: true
type: boolean
workflow_dispatch:
inputs:
source:
@@ -43,6 +47,10 @@ on:
required: false
default: ''
type: string
linux_armv7l:
description: Include linux_armv7l
default: true
type: boolean
prerelease:
description: Pre-release
default: false
@@ -67,145 +75,67 @@ jobs:
head_sha: ${{ steps.get_target.outputs.head_sha }}

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0

- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: "3.10"
python-version: "3.10" # Keep this in sync with test-workflows.yml

- name: Process inputs
id: process_inputs
env:
INPUTS: ${{ toJSON(inputs) }}
run: |
cat << EOF
::group::Inputs
prerelease=${{ inputs.prerelease }}
source=${{ inputs.source }}
target=${{ inputs.target }}
version=${{ inputs.version }}
::endgroup::
EOF
IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
cat << EOF >> "$GITHUB_OUTPUT"
source_repo=${source_repo}
source_tag=${source_tag}
target_repo=${target_repo}
target_tag=${target_tag}
EOF
python -m devscripts.setup_variables process_inputs
|
||||
- name: Setup variables
|
||||
id: setup_variables
|
||||
env:
|
||||
source_repo: ${{ steps.process_inputs.outputs.source_repo }}
|
||||
source_tag: ${{ steps.process_inputs.outputs.source_tag }}
|
||||
target_repo: ${{ steps.process_inputs.outputs.target_repo }}
|
||||
target_tag: ${{ steps.process_inputs.outputs.target_tag }}
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
|
||||
PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
|
||||
SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
|
||||
SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
|
||||
TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
|
||||
TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
|
||||
SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
|
||||
TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
|
||||
HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
|
||||
HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
|
||||
HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
|
||||
run: |
|
||||
# unholy bash monstrosity (sincere apologies)
|
||||
fallback_token () {
|
||||
if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
|
||||
echo "::error::Repository access secret ${target_repo_token^^} not found"
|
||||
exit 1
|
||||
fi
|
||||
target_repo_token=ARCHIVE_REPO_TOKEN
|
||||
return 0
|
||||
}
|
||||
python -m devscripts.setup_variables
|
||||
|
||||
source_is_channel=0
|
||||
[[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
|
||||
if [[ -z "${source_repo}" ]]; then
|
||||
source_repo='${{ github.repository }}'
|
||||
elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
|
||||
source_is_channel=1
|
||||
source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
|
||||
elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
|
||||
source_tag="${source_repo}"
|
||||
source_repo='${{ github.repository }}'
|
||||
fi
|
||||
resolved_source="${source_repo}"
|
||||
if [[ "${source_tag}" ]]; then
|
||||
resolved_source="${resolved_source}@${source_tag}"
|
||||
elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
|
||||
resolved_source='stable'
|
||||
fi
|
||||
|
||||
revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
|
||||
version="$(
|
||||
python devscripts/update-version.py \
|
||||
-c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
|
||||
grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
|
||||
|
||||
if [[ "${target_repo}" ]]; then
|
||||
if [[ -z "${target_tag}" ]]; then
|
||||
if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
|
||||
target_tag="${source_tag:-${version}}"
|
||||
else
|
||||
target_tag="${target_repo}"
|
||||
target_repo='${{ github.repository }}'
|
||||
fi
|
||||
fi
|
||||
if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
|
||||
target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
|
||||
target_repo_token='${{ env.target_repo }}_archive_repo_token'
|
||||
${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
|
||||
pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
|
||||
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
|
||||
fi
|
||||
else
|
||||
target_tag="${source_tag:-${version}}"
|
||||
if ((source_is_channel)); then
|
||||
target_repo="${source_channel}"
|
||||
target_repo_token='${{ env.source_repo }}_archive_repo_token'
|
||||
${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
|
||||
pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
|
||||
pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
|
||||
else
|
||||
target_repo='${{ github.repository }}'
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
|
||||
pypi_project='${{ vars.PYPI_PROJECT }}'
|
||||
fi
|
||||
|
||||
echo "::group::Output variables"
|
||||
cat << EOF | tee -a "$GITHUB_OUTPUT"
|
||||
channel=${resolved_source}
|
||||
version=${version}
|
||||
target_repo=${target_repo}
|
||||
target_repo_token=${target_repo_token}
|
||||
target_tag=${target_tag}
|
||||
pypi_project=${pypi_project}
|
||||
pypi_suffix=${pypi_suffix}
|
||||
EOF
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Update documentation
|
||||
- name: Update version & documentation
|
||||
env:
|
||||
version: ${{ steps.setup_variables.outputs.version }}
|
||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||
if: |
|
||||
!inputs.prerelease && env.target_repo == github.repository
|
||||
CHANNEL: ${{ steps.setup_variables.outputs.channel }}
|
||||
# Use base repo since this could be committed; build jobs will call this again with true origin
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
VERSION: ${{ steps.setup_variables.outputs.version }}
|
||||
run: |
|
||||
python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
|
||||
python devscripts/update_changelog.py -vv
|
||||
make doc
|
||||
|
||||
- name: Push to release
|
||||
id: push_release
|
||||
env:
|
||||
version: ${{ steps.setup_variables.outputs.version }}
|
||||
target_repo: ${{ steps.setup_variables.outputs.target_repo }}
|
||||
VERSION: ${{ steps.setup_variables.outputs.version }}
|
||||
GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
|
||||
GITHUB_EVENT_REF: ${{ github.event.ref }}
|
||||
if: |
|
||||
!inputs.prerelease && env.target_repo == github.repository
|
||||
!inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
|
||||
run: |
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git add -u
|
||||
git commit -m "Release ${{ env.version }}" \
|
||||
-m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
|
||||
git push origin --force ${{ github.event.ref }}:release
|
||||
git commit -m "Release ${VERSION}" \
|
||||
-m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
|
||||
git push origin --force "${GITHUB_EVENT_REF}:release"
|
||||
|
||||
- name: Get target commitish
|
||||
id: get_target
|
||||
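Editor's note: the hunk above replaces the inline "bash monstrosity" with `python -m devscripts.setup_variables`, whose source is not part of this diff. As a rough illustrative sketch of just the `source`/`target` parsing that the old bash performed with `IFS='@' read -r`, under the assumption that the new script does something equivalent (the function name here is hypothetical, not the script's real API):

```python
# Illustrative sketch only; not the actual devscripts/setup_variables.py.
# Mirrors the old `IFS='@' read -r repo tag <<<"${input}"` split.
def split_repo_tag(value: str) -> tuple[str, str]:
    """'owner/repo@tag' -> ('owner/repo', 'tag'); 'nightly' -> ('nightly', '')."""
    repo, _, tag = value.partition('@')
    return repo, tag

assert split_repo_tag('yt-dlp/yt-dlp@2024.05.26') == ('yt-dlp/yt-dlp', '2024.05.26')
assert split_repo_tag('nightly') == ('nightly', '')
```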
@@ -214,10 +144,10 @@ jobs:

      - name: Update master
        env:
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          GITHUB_EVENT_REF: ${{ github.event.ref }}
        if: |
-          vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
-        run: git push origin ${{ github.event.ref }}
+          vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
+        run: git push origin "${GITHUB_EVENT_REF}"

  build:
    needs: prepare
@@ -226,9 +156,9 @@ jobs:
      version: ${{ needs.prepare.outputs.version }}
      channel: ${{ needs.prepare.outputs.channel }}
      origin: ${{ needs.prepare.outputs.target_repo }}
+      linux_armv7l: ${{ inputs.linux_armv7l }}
    permissions:
      contents: read
      packages: write # For package cache
    secrets:
      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

@@ -240,30 +170,30 @@ jobs:
      id-token: write # mandatory for trusted publishing

    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"

      - name: Install Requirements
        run: |
          sudo apt -y install pandoc man
-          python devscripts/install_deps.py -o --include build
+          python devscripts/install_deps.py --only-optional-groups --include-group build

      - name: Prepare
        env:
-          version: ${{ needs.prepare.outputs.version }}
-          suffix: ${{ needs.prepare.outputs.pypi_suffix }}
-          channel: ${{ needs.prepare.outputs.channel }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          pypi_project: ${{ needs.prepare.outputs.pypi_project }}
+          VERSION: ${{ needs.prepare.outputs.version }}
+          SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
+          CHANNEL: ${{ needs.prepare.outputs.channel }}
+          TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
+          PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
        run: |
-          python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
          python devscripts/update_changelog.py -vv
          python devscripts/make_lazy_extractors.py
-          sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
+          sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml

      - name: Build
        run: |
@@ -277,7 +207,17 @@ jobs:
          make clean-cache
          python -m build --no-isolation .

      - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
        uses: actions/upload-artifact@v4
        with:
          name: build-pypi
          path: |
            dist/*
          compression-level: 0

+      - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
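A side note on the `sed` invocation in the Prepare step above: it rewrites only the first `name = "..."` occurrence in pyproject.toml, so the package is built under the channel-specific PyPI project name. A rough Python equivalent, for readers less fluent in sed (illustrative only; `pypi_project` is a stand-in value, not a real project name):

```python
# Illustrative equivalent of the sed rename above; not part of the workflow.
import re
from pathlib import Path

pypi_project = 'yt-dlp-example'  # stand-in for ${PYPI_PROJECT}
path = Path('pyproject.toml')
# count=1 mirrors sed's '0,/.../s//.../' address range (first match only)
path.write_text(re.sub(
    r'(name = ")[^"]+(")', rf'\g<1>{pypi_project}\g<2>',
    path.read_text(), count=1))
```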
@@ -287,97 +227,100 @@ jobs:
    permissions:
      contents: write
    runs-on: ubuntu-latest

+    env:
+      TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
+      TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
+      VERSION: ${{ needs.prepare.outputs.version }}
+      HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
        with:
          fetch-depth: 0
-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v5
        with:
          path: artifact
          pattern: build-*
          merge-multiple: true
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"

      - name: Generate release notes
        env:
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
+          REPOSITORY: ${{ github.repository }}
+          BASE_REPO: yt-dlp/yt-dlp
+          NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
+          MASTER_REPO: yt-dlp/yt-dlp-master-builds
+          DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
        run: |
          printf '%s' \
-            '[]' \
-            '(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
-            '[]' \
-            '(https://discord.gg/H5MNcFW63r "Discord") ' \
-            '[]' \
-            '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
-            '[]' \
-            '(https://github.com/${{ github.repository }}' \
-            '${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
-            ${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
-            "[]" \
-            "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
-            "[]" \
-            "(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
-          printf '\n\n' >> ./RELEASE_NOTES
-          cat >> ./RELEASE_NOTES << EOF
-          #### A description of the various files are in the [README](https://github.com/${{ github.repository }}#release-files)
-          ---
-          $(python ./devscripts/make_changelog.py -vv --collapsible)
-          EOF
+            "[]" \
+            "(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
+            "[]" \
+            "(https://discord.gg/H5MNcFW63r \"Discord\") " \
+            "[]" \
+            "(https://github.com/${BASE_REPO}/blob/master/Maintainers.md#maintainers \"Donate\") " \
+            "[]" \
+            "(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
+          if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
+            printf '%s' \
+              "[]" \
+              "(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
+              "[]" \
+              "(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
+          fi
+          printf '\n\n%s\n\n%s%s%s\n\n---\n' \
+            "#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
+            "The zipimport Unix executable contains code licensed under ISC and MIT. " \
+            "The PyInstaller-bundled executables are subject to these and other licenses, all of which are compiled in " \
+            "[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
+          python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
          printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
          cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
-          printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
+          printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
          cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES

      - name: Publish to archive repo
        env:
          GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
          GH_REPO: ${{ needs.prepare.outputs.target_repo }}
-          version: ${{ needs.prepare.outputs.version }}
-          channel: ${{ needs.prepare.outputs.channel }}
+          TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
+          TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
        if: |
-          inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
+          inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
        run: |
-          title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
          gh release create \
            --notes-file ARCHIVE_NOTES \
-            --title "${title} ${{ env.version }}" \
-            ${{ env.version }} \
+            --title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
+            "${VERSION}" \
            artifact/*

      - name: Prune old release
        env:
          GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
        if: |
-          env.target_repo == github.repository && env.target_tag != env.version
+          env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
        run: |
-          gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
-          git tag --delete "${{ env.target_tag }}" || true
+          gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
+          git tag --delete "${TARGET_TAG}" || true
          sleep 5 # Enough time to cover deletion race condition

      - name: Publish release
        env:
          GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
+          NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
+          TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
+          TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
+          PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
        if: |
-          env.target_repo == github.repository
+          env.TARGET_REPO == github.repository
        run: |
-          title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
-          title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
-          gh release create \
-            --notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
-            --target ${{ env.head_sha }} \
-            --title "${title}${{ env.version }}" \
-            ${{ inputs.prerelease && '--prerelease' || '' }} \
-            ${{ env.target_tag }} \
-            artifact/*
+          gh_options=(
+            --notes-file "${NOTES_FILE}"
+            --target "${HEAD_SHA}"
+            --title "${TITLE_PREFIX}${TITLE}${VERSION}"
+          )
+          if ((PRERELEASE)); then
+            gh_options+=(--prerelease)
+          fi
+          gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*
17 .github/workflows/sanitize-comment.yml vendored Normal file
@@ -0,0 +1,17 @@
name: Sanitize comment

on:
  issue_comment:
    types: [created, edited]

permissions:
  issues: write

jobs:
  sanitize-comment:
    name: Sanitize comment
    if: vars.SANITIZE_COMMENT && !github.event.issue.pull_request
    runs-on: ubuntu-latest
    steps:
      - name: Sanitize comment
        uses: yt-dlp/sanitize-comment@v1
52 .github/workflows/test-workflows.yml vendored Normal file
@@ -0,0 +1,52 @@
name: Test and lint workflows
on:
  push:
    paths:
      - .github/workflows/*
      - bundle/docker/linux/*.sh
      - devscripts/setup_variables.py
      - devscripts/setup_variables_tests.py
      - devscripts/utils.py
  pull_request:
    paths:
      - .github/workflows/*
      - bundle/docker/linux/*.sh
      - devscripts/setup_variables.py
      - devscripts/setup_variables_tests.py
      - devscripts/utils.py
permissions:
  contents: read
env:
  ACTIONLINT_VERSION: "1.7.7"
  ACTIONLINT_SHA256SUM: 023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757
  ACTIONLINT_REPO: https://github.com/rhysd/actionlint

jobs:
  check:
    name: Check workflows
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-python@v6
        with:
          python-version: "3.10" # Keep this in sync with release.yml's prepare job
      - name: Install requirements
        env:
          ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
        run: |
          python -m devscripts.install_deps --only-optional-groups --include-group test
          sudo apt -y install shellcheck
          python -m pip install -U pyflakes
          curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
          printf '%s  %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
          tar xvzf "${ACTIONLINT_TARBALL}" actionlint
          chmod +x actionlint
      - name: Run actionlint
        run: |
          ./actionlint -color
      - name: Check Docker shell scripts
        run: |
          shellcheck bundle/docker/linux/*.sh
      - name: Test GHA devscripts
        run: |
          pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py
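The actionlint install above pins an exact version and verifies the downloaded tarball against a hard-coded SHA-256 before extracting it. For readers who want the same pin-and-verify pattern outside CI, a minimal Python sketch (the URL and hash are taken from the workflow; the pattern, not the snippet itself, is the point):

```python
# Minimal sketch of the pin-and-verify download pattern used in the step above
import hashlib
import urllib.request

version = '1.7.7'
expected_sha256 = '023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757'
url = (f'https://github.com/rhysd/actionlint/releases/download/'
       f'v{version}/actionlint_{version}_linux_amd64.tar.gz')

data = urllib.request.urlopen(url).read()
if hashlib.sha256(data).hexdigest() != expected_sha256:
    raise RuntimeError('checksum mismatch; refusing to use the download')
```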
9 .gitignore vendored
@@ -51,7 +51,6 @@ cookies
*.srt
*.ssa
*.swf
-*.swp
*.tt
*.ttml
*.url
@@ -93,6 +92,7 @@ updates_key.pem
*.class
*.isorted
*.stackdump
+uv.lock

# Generated
AUTHORS
@@ -105,6 +105,9 @@ README.txt
*.zsh
*.spec
test/testdata/sigs/player-*.js
+test/testdata/thumbnails/empty.webp
+test/testdata/thumbnails/foo\ %d\ bar/foo_%d.*
+.ejs-*

# Binary
/youtube-dl
@@ -119,6 +122,7 @@ yt-dlp.zip
.vscode
*.sublime-*
*.code-workspace
+*.swp

# Lazy extractors
*/extractor/lazy_extractors.py
@@ -126,3 +130,6 @@ yt-dlp.zip
# Plugins
ytdlp_plugins/
yt-dlp-plugins
+
+# Packages
+yt_dlp_ejs/
CONTRIBUTING.md
@@ -12,6 +12,7 @@ # CONTRIBUTING TO YT-DLP
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
+- [AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY](#automated-contributions-ai--llm-policy)
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
- [Adding support for a new site](#adding-support-for-a-new-site)
@@ -37,14 +38,18 @@ # OPENING AN ISSUE
**Please include the full output of yt-dlp when run with `-vU`**, i.e. **add** `-vU` flag to **your command line**, copy the **whole** output and post it in the issue body wrapped in \`\`\` for better formatting. It should look similar to this:
```
$ yt-dlp -vU <your command line>
-[debug] Command-line config: ['-v', 'demo.com']
-[debug] Encodings: locale UTF-8, fs utf-8, out utf-8, pref UTF-8
-[debug] yt-dlp version 2021.09.25 (zip)
-[debug] Python version 3.8.10 (CPython 64bit) - Linux-5.4.0-74-generic-x86_64-with-glibc2.29
-[debug] exe versions: ffmpeg 4.2.4, ffprobe 4.2.4
+[debug] Command-line config: ['-vU', 'https://www.example.com/']
+[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
[debug] Proxy map: {}
-Current Build Hash 25cc412d1d3c0725a1f2f5b7e4682f6fb40e6d15f7024e96f7afd572e9919535
-yt-dlp is up to date (2021.09.25)
+[debug] Request Handlers: urllib, requests, websockets, curl_cffi
+[debug] Loaded 1838 extractors
+[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
+yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
...
```
**Do not post screenshots of verbose logs; only plain text is acceptable.**
@@ -122,14 +127,25 @@ ### Are you willing to share account details if needed?
While these steps won't necessarily ensure that no misuse of the account takes place, these are still some good practices to follow.

- Look for people with `Member` (maintainers of the project) or `Contributor` (people who have previously contributed code) tag on their messages.
-- Change the password before sharing the account to something random (use [this](https://passwordsgenerator.net/) if you don't have a random password generator).
+- Change the password before sharing the account to something random.
- Change the password after receiving the account back.

### Is the website primarily used for piracy?

-We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
+We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).


+# AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY
+
+Please refrain from submitting issues or pull requests that have been generated by an LLM or other fully-automated tools. Any submission that is in violation of this policy will be closed, and the submitter may be blocked from this repository without warning.
+
+If you submit an issue, you need to understand what your issue description is saying. You need to be able to answer questions about your bug report or feature request. Using an AI tool to *proofread* your issue/comment text is acceptable. Using an AI tool to *write* your issue/comment text is unacceptable.
+
+If you submit a pull request, you need to understand what every line of code you've changed does. If you can't explain why your PR is doing something, then do not submit it. Using an AI tool to generate entire lines of code is unacceptable.
+
+The rationale behind this policy is that automated contributions are a waste of the maintainers' time. Humans spend their time and brainpower reviewing every submission. Issues or pull requests generated by automation tools create an imbalance of effort between the submitter and the reviewer. Nobody learns anything when a maintainer reviews code written by an LLM.
+
+Additionally, AI-generated code conflicts with this project's license (Unlicense), since you cannot truly release code into the public domain if you didn't author it yourself.


# DEVELOPER INSTRUCTIONS
@@ -215,8 +231,8 @@ ## Adding support for a new site

```python
from .common import InfoExtractor


class YourExtractorIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?yourextractor\.com/watch/(?P<id>[0-9]+)'
    _TESTS = [{
@@ -233,7 +249,7 @@ ## Adding support for a new site
            # * MD5 checksum; start the string with 'md5:', e.g.
            #     'description': 'md5:098f6bcd4621d373cade4e832627b4f6',
            # * A regular expression; start the string with 're:', e.g.
-           #     'thumbnail': r're:^https?://.*\.jpg$',
+           #     'thumbnail': r're:https?://.*\.jpg$',
            # * A count of elements in a list; start the string with 'count:', e.g.
            #     'tags': 'count:10',
            # * Any Python type, e.g.
@@ -244,7 +260,7 @@ ## Adding support for a new site
    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # TODO more code goes here, for example ...
        title = self._html_search_regex(r'<h1>(.+?)</h1>', webpage, 'title')
@@ -266,9 +282,9 @@ ## Adding support for a new site
    $ hatch fmt --check
    ```

-   You can use `hatch fmt` to automatically fix problems.
+   You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).

-1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.8 and above. Backward compatibility is not required for even older versions of Python.
+1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.10 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:

    ```shell
@@ -302,10 +318,9 @@ ### Mandatory and optional metafields
For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L119-L440) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:

- `id` (media identifier)
-- `title` (media title)
- `url` (media download URL) or `formats`

-The aforementioned metafields are the critical data that the extraction does not make any sense without and if any of them fail to be extracted then the extractor is considered completely broken. While all extractors must return a `title`, they must also allow it's extraction to be non-fatal.
+The aforementioned metadata fields are the critical data without which extraction does not make any sense. If any of them fail to be extracted, then the extractor is considered broken. All other metadata extraction should be completely non-fatal.

For pornographic sites, appropriate `age_limit` must also be returned.
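Editor's note: to make the rule in the hunk above concrete, here is a minimal hypothetical extractor body (illustrative only, not part of this diff). `id` and `url`/`formats` must extract or the run fails, while `title` is extracted non-fatally:

```python
    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        return {
            'id': video_id,  # mandatory
            # non-fatal: returns None instead of raising if the page changes
            'title': self._html_search_regex(
                r'<h1>(.+?)</h1>', webpage, 'title', fatal=False),
            'url': self._og_search_video_url(webpage),  # mandatory (or 'formats')
        }
```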
@@ -320,7 +335,7 @@ #### Example
```python
meta = self._download_json(url, video_id)
```

Assume at this point `meta`'s layout is:

```python
@@ -750,7 +765,7 @@ ### Use convenience conversion and parsing functions
Use `traverse_obj` and `try_call` (supersedes `dict_get` and `try_get`) for safe metadata extraction from parsed JSON.

-Use `unified_strdate` for uniform `upload_date` or any `YYYYMMDD` meta field extraction, `unified_timestamp` for uniform `timestamp` extraction, `parse_filesize` for `filesize` extraction, `parse_count` for count meta fields extraction, `parse_resolution`, `parse_duration` for `duration` extraction, `parse_age_limit` for `age_limit` extraction.
+Use `unified_strdate` for uniform `upload_date` or any `YYYYMMDD` meta field extraction, `unified_timestamp` for uniform `timestamp` extraction, `parse_filesize` for `filesize` extraction, `parse_count` for count meta fields extraction, `parse_resolution`, `parse_duration` for `duration` extraction, `parse_age_limit` for `age_limit` extraction.

Explore [`yt_dlp/utils/`](yt_dlp/utils/) for more useful convenience functions.
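A short illustrative example of the functions named above (the JSON layout is invented for demonstration):

```python
from yt_dlp.utils import parse_count, parse_duration, unified_timestamp
from yt_dlp.utils.traversal import traverse_obj

meta = {'video': {'title': 'Example', 'stats': {'views': '1.2M'}},
        'published': '2024-05-26T12:00:00Z'}

title = traverse_obj(meta, ('video', 'title'))           # 'Example'
likes = traverse_obj(meta, ('video', 'stats', 'likes'))  # None, no KeyError
view_count = parse_count(traverse_obj(meta, ('video', 'stats', 'views')))  # 1200000
timestamp = unified_timestamp(meta.get('published'))     # epoch seconds
duration = parse_duration('1:02:03')                     # 3723
```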
@@ -765,12 +780,10 @@ #### Examples
```

-# My pull request is labeled pending-fixes
+## My pull request is labeled pending-fixes

The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.


# EMBEDDING YT-DLP
See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program
200 CONTRIBUTORS
@@ -1,9 +1,10 @@
pukkandan (owner)
-shirt-dev (collaborator)
-coletdjnz/colethedj (collaborator)
-Ashish0804 (collaborator)
-bashonly (collaborator)
-Grub4K (collaborator)
+shirt-dev (maintainer)
+coletdjnz (maintainer)
+Ashish0804 (maintainer)
+bashonly (maintainer)
+Grub4K (maintainer)
+seproDev (maintainer)
h-h-h-h
pauldubois98
nixxo
@@ -403,7 +404,6 @@ rebane2001
road-master
rohieb
sdht0
-seproDev
Hill-98
LXYan2333
mushbite
@@ -630,3 +630,191 @@ TuxCoder
voidful
vtexier
WyohKnott
trueauracoral
ASertacAkkaya
axpauls
chilinux
hafeoz
JSubelj
jucor
megumintyan
mgedmin
Niluge-KiWi
peisenwang
TheZ3ro
tippfehlr
varunchopra
DrakoCpp
PatrykMis
DinhHuy2010
exterrestris
harbhim
LeSuisse
DunnesH
iancmy
mokrueger
luvyana
szantnerb
hugepower
scribblemaniac
Codenade
Demon000
Deukhoofd
grqz
hibes
Khaoklong51
kieraneglin
lengzuo
naglis
ndyanx
otovalek
quad
rakslice
sahilsinghss73
tony-hn
xingchensong
BallzCrasher
coreywright
eric321
poyhen
tetra-fox
444995
63427083
allendema
DarkZeros
DTrombett
imranh2
KarboniteKream
mikkovedru
pktiuk
rubyevadestaxes
avagordon01
CounterPillow
JoseAngelB
KBelmin
kesor
MellowKyler
Wesley107772
a13ssandr0
ChocoLZS
doe1080
hugovdev
jshumphrey
julionc
manavchaudhary1
powergold1
Sakura286
SamDecrock
stratus-ss
subrat-lima
gitninja1234
jkruse
xiaomac
wesson09
Crypto90
MutantPiggieGolem1
Sanceilaks
Strkmn
0x9fff00
4ft35t
7x11x13
b5i
cotko
d3d9
Dioarya
finch71
hexahigh
InvalidUsernameException
jixunmoe
knackku
krandor
kvk-2015
lonble
msm595
n10dollar
NecroRomnt
pjrobertson
subsense
test20140
arantius
entourage8
lfavole
mp3butcher
slipinthedove
YoshiTabletopGamer
Arc8ne
benfaerber
chrisellsworth
fries1234
Kenshin9977
MichaelDeBoey
msikma
pedro
pferreir
red-acid
refack
rysson
somini
thedenv
vallovic
arabcoders
mireq
mlabeeb03
1271
CasperMcFadden95
Kicer86
Kiritomo
leeblackc
meGAmeS1
NeonMan
pj47x
troex
WouterGordts
baierjan
GeoffreyFrogeye
Pawka
v3DJG6GL
yozel
brian6932
iednod55
maxbin123
nullpos
anlar
eason1478
ceandreasen
chauhantirth
helpimnotdrowning
adamralph
averageFOSSenjoyer
bubo
flanter21
Georift
moonshinerd
R0hanW
ShockedPlot7560
swayll
atsushi2965
barryvan
injust
iribeirocampos
rolandcrosby
Sojiroh
tchebb
AzartX47
e2dk4r
junyilou
PierreMesure
Randalix
runarmod
gitchasing
zakaryan2004
cdce8p
nicolaasjan
willsmillie
CasualYT31
cecilia-sanare
dhwz
robin-mu
shssoichiro
thanhtaivtt
uoag
1452 Changelog.md
File diff suppressed because it is too large

Collaborators.md
@@ -1,63 +0,0 @@
# Collaborators

This is a list of the collaborators of the project and their major contributions. See the [Changelog](Changelog.md) for more details.

You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [authors of youtube-dl](https://github.com/ytdl-org/youtube-dl/blob/master/AUTHORS)


## [pukkandan](https://github.com/pukkandan)

[](https://ko-fi.com/pukkandan)
[](https://github.com/sponsors/pukkandan)

* Owner of the fork


## [shirt](https://github.com/shirt-dev)

[](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)


## [coletdjnz](https://github.com/coletdjnz)

[](https://github.com/sponsors/coletdjnz)

* Improved plugin architecture
* Rewrote the networking infrastructure, implemented support for `requests`
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc


## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>

[](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc


## [bashonly](https://github.com/bashonly)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* `--cookies-from-browser` support for Firefox containers, external downloader cookie handling overhaul
* Added support for new websites like Dacast, Kick, NBCStations, Triller, VideoKen, Weverse, WrestleUniverse etc
* Improved/fixed support for Anvato, Brightcove, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc


## [Grub4K](https://github.com/Grub4K)

[](https://github.com/sponsors/Grub4K) [](https://ko-fi.com/Grub4K)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* Reworked internals like `traverse_obj`, various core refactors and bugs fixes
* Implemented proper progress reporting for parallel downloads
* Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc
87 Maintainers.md Normal file
@@ -0,0 +1,87 @@
# Maintainers

This file lists the maintainers of yt-dlp and their major contributions. See the [Changelog](Changelog.md) for more details.

You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [authors of youtube-dl](https://github.com/ytdl-org/youtube-dl/blob/master/AUTHORS)

## Core Maintainers

Core Maintainers are responsible for reviewing and merging contributions, publishing releases, and steering the overall direction of the project.

**You can contact the core maintainers via `maintainers@yt-dlp.org`.**

This is **NOT** a support channel. [Open an issue](https://github.com/yt-dlp/yt-dlp/issues/new/choose) if you need help or want to report a bug.

### [coletdjnz](https://github.com/coletdjnz)

[](https://github.com/sponsors/coletdjnz)

* Overhauled the networking stack and implemented support for `requests` and `curl_cffi` (`--impersonate`) HTTP clients
* Reworked the plugin architecture to support installing plugins across all yt-dlp distributions (exe, pip, etc.)
* Maintains support for YouTube
* Added and fixed support for various other sites

### [bashonly](https://github.com/bashonly)

* Rewrote and maintains the build/release workflows and the self-updater: executables, automated/nightly/master releases, `--update-to`
* Overhauled external downloader cookie handling
* Added `--cookies-from-browser` support for Firefox containers
* Overhauled and maintains support for sites like Youtube, Vimeo, Twitter, TikTok, etc
* Added support for sites like Dacast, Kick, Loom, SproutVideo, Triller, Weverse, etc


### [Grub4K](https://github.com/Grub4K)

[](https://github.com/sponsors/Grub4K) [](https://ko-fi.com/Grub4K)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* Reworked internals like `traverse_obj`, various core refactors and bugs fixes
* Implemented proper progress reporting for parallel downloads
* Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc


### [sepro](https://github.com/seproDev)

* UX improvements: Warn when ffmpeg is missing, warn when double-clicking exe
* Code cleanup: Remove dead extractors, mark extractors as broken, enable/apply ruff rules
* Improved/fixed/added ArdMediathek, DRTV, Floatplane, MagentaMusik, Naver, Nebula, OnDemandKorea, Vbox7 etc


## Inactive Core Maintainers

### [pukkandan](https://github.com/pukkandan)

[](https://ko-fi.com/pukkandan)
[](https://github.com/sponsors/pukkandan)

* Founder of the fork
* Lead Maintainer from 2021-2024


### [shirt](https://github.com/shirt-dev)

[](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)


### [Ashish0804](https://github.com/Ashish0804)

[](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc

## Triage Maintainers

Triage Maintainers are frequent contributors who can manage issues and pull requests.

- [gamer191](https://github.com/gamer191)
- [garret1317](https://github.com/garret1317)
- [pzhlkj6612](https://github.com/pzhlkj6612)
- [DTrombett](https://github.com/dtrombett)
- [doe1080](https://github.com/doe1080)
- [grqz](https://github.com/grqz)
114 Makefile
@@ -1,4 +1,5 @@
all: lazy-extractors yt-dlp doc pypi-files
+all-extra: lazy-extractors yt-dlp-extra doc pypi-files
clean: clean-test clean-dist
clean-all: clean clean-cache
completions: completion-bash completion-fish completion-zsh
@@ -10,21 +11,27 @@ tar: yt-dlp.tar.gz
# intended use: when building a source distribution,
# make pypi-files && python3 -m build -sn .
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
-    completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
+    completions yt-dlp.1 pyproject.toml devscripts/* test/*

.PHONY: all clean clean-all clean-test clean-dist clean-cache \
    completions completion-bash completion-fish completion-zsh \
    doc issuetemplates supportedsites ot offlinetest codetest test \
-    tar pypi-files lazy-extractors install uninstall
+    tar pypi-files lazy-extractors install uninstall \
+    all-extra yt-dlp-extra current-ejs-version

+.IGNORE: current-ejs-version
+.SILENT: current-ejs-version

clean-test:
-    rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
+    rm -rf tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
    *.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
    *.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.lrc *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 *.mp4 \
-    *.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
+    *.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp \
+    test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp "test/testdata/thumbnails/foo %d bar/foo_%d."*
clean-dist:
    rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
-    yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
+    yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS \
+    yt-dlp.zip .ejs-* yt_dlp_ejs/
clean-cache:
    find . \( \
        -type d -name ".*_cache" -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \
@@ -80,28 +87,49 @@ test:
offlinetest: codetest
    $(PYTHON) -m pytest -Werror -m "not download"

-CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's,/__init__.py,,' | grep -v '/__' | sort
-CODE_FOLDERS != $(CODE_FOLDERS_CMD)
-CODE_FOLDERS ?= $(shell $(CODE_FOLDERS_CMD))
-CODE_FILES_CMD = for f in $(CODE_FOLDERS) ; do echo "$$f" | sed 's,$$,/*.py,' ; done
-CODE_FILES != $(CODE_FILES_CMD)
-CODE_FILES ?= $(shell $(CODE_FILES_CMD))
-yt-dlp: $(CODE_FILES)
+PY_CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's|/__init__\.py||' | grep -v '/__' | sort
+PY_CODE_FOLDERS != $(PY_CODE_FOLDERS_CMD)
+PY_CODE_FOLDERS ?= $(shell $(PY_CODE_FOLDERS_CMD))
+
+PY_CODE_FILES_CMD = for f in $(PY_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.py|' ; done
+PY_CODE_FILES != $(PY_CODE_FILES_CMD)
+PY_CODE_FILES ?= $(shell $(PY_CODE_FILES_CMD))
+
+JS_CODE_FOLDERS_CMD = find yt_dlp -type f -name '*.js' | sed 's|/[^/]\{1,\}\.js$$||' | uniq
+JS_CODE_FOLDERS != $(JS_CODE_FOLDERS_CMD)
+JS_CODE_FOLDERS ?= $(shell $(JS_CODE_FOLDERS_CMD))
+
+JS_CODE_FILES_CMD = for f in $(JS_CODE_FOLDERS) ; do echo "$$f" | sed 's|$$|/*.js|' ; done
+JS_CODE_FILES != $(JS_CODE_FILES_CMD)
+JS_CODE_FILES ?= $(shell $(JS_CODE_FILES_CMD))
+
+yt-dlp.zip: $(PY_CODE_FILES) $(JS_CODE_FILES)
    mkdir -p zip
-    for d in $(CODE_FOLDERS) ; do \
+    for d in $(PY_CODE_FOLDERS) ; do \
        mkdir -p zip/$$d ;\
        cp -pPR $$d/*.py zip/$$d/ ;\
    done
-    (cd zip && touch -t 200001010101 $(CODE_FILES))
-    mv zip/yt_dlp/__main__.py zip/
-    (cd zip && zip -q ../yt-dlp $(CODE_FILES) __main__.py)
+    for d in $(JS_CODE_FOLDERS) ; do \
+        mkdir -p zip/$$d ;\
+        cp -pPR $$d/*.js zip/$$d/ ;\
+    done
+    (cd zip && touch -t 200001010101 $(PY_CODE_FILES) $(JS_CODE_FILES))
+    rm -f zip/yt_dlp/__main__.py
+    (cd zip && zip -q ../yt-dlp.zip $(PY_CODE_FILES) $(JS_CODE_FILES))
    rm -rf zip

+yt-dlp: yt-dlp.zip
+    mkdir -p zip
+    cp -pP yt_dlp/__main__.py zip/
+    touch -t 200001010101 zip/__main__.py
+    (cd zip && zip -q ../yt-dlp.zip __main__.py)
    echo '#!$(PYTHON)' > yt-dlp
    cat yt-dlp.zip >> yt-dlp
    rm yt-dlp.zip
    chmod a+x yt-dlp
    rm -rf zip
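Editor's note on the `yt-dlp` target above: it builds the executable by writing a `#!` line and appending the zip archive, which is the same shebang-plus-zip layout that Python's stdlib `zipapp` module produces. A small illustrative equivalent, assuming a `zip/` tree with a top-level `__main__.py` as in the Makefile:

```python
# Illustrative only: stdlib equivalent of `echo '#!...' > yt-dlp; cat yt-dlp.zip >> yt-dlp`
import zipapp

zipapp.create_archive(
    'zip',                               # directory containing __main__.py
    target='yt-dlp',                     # output executable
    interpreter='/usr/bin/env python3',  # becomes the shebang line
)
```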
-README.md: $(CODE_FILES) devscripts/make_readme.py
+README.md: $(PY_CODE_FILES) devscripts/make_readme.py
    COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py

CONTRIBUTING.md: README.md devscripts/make_contributing.py
@@ -126,15 +154,15 @@ yt-dlp.1: README.md devscripts/prepare_manpage.py
    pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
    rm -f yt-dlp.1.temp.md

-completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in
+completions/bash/yt-dlp: $(PY_CODE_FILES) devscripts/bash-completion.in
    mkdir -p completions/bash
    $(PYTHON) devscripts/bash-completion.py

-completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in
+completions/zsh/_yt-dlp: $(PY_CODE_FILES) devscripts/zsh-completion.in
    mkdir -p completions/zsh
    $(PYTHON) devscripts/zsh-completion.py

-completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in
+completions/fish/yt-dlp.fish: $(PY_CODE_FILES) devscripts/fish-completion.in
    mkdir -p completions/fish
    $(PYTHON) devscripts/fish-completion.py

@@ -156,9 +184,9 @@ yt-dlp.tar.gz: all
        --exclude '.git' \
        -- \
        README.md supportedsites.md Changelog.md LICENSE \
-        CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
+        CONTRIBUTING.md Maintainers.md CONTRIBUTORS AUTHORS \
        Makefile yt-dlp.1 README.txt completions .gitignore \
-        setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
+        yt-dlp yt_dlp pyproject.toml devscripts test

AUTHORS: Changelog.md
    @if [ -d '.git' ] && command -v git > /dev/null ; then \
@@ -171,3 +199,45 @@ CONTRIBUTORS: Changelog.md
        echo 'Updating $@ from git commit history' ; \
        $(PYTHON) devscripts/make_changelog.py -v -c > /dev/null ; \
    fi

+# The following EJS_-prefixed variables are auto-generated by devscripts/update_ejs.py
+# DO NOT EDIT!
+EJS_VERSION = 0.3.1
+EJS_WHEEL_NAME = yt_dlp_ejs-0.3.1-py3-none-any.whl
+EJS_WHEEL_HASH = sha256:a6e3548874db7c774388931752bb46c7f4642c044b2a189e56968f3d5ecab622
+EJS_PY_FOLDERS = yt_dlp_ejs yt_dlp_ejs/yt yt_dlp_ejs/yt/solver
+EJS_PY_FILES = yt_dlp_ejs/__init__.py yt_dlp_ejs/_version.py yt_dlp_ejs/yt/__init__.py yt_dlp_ejs/yt/solver/__init__.py
+EJS_JS_FOLDERS = yt_dlp_ejs/yt/solver
+EJS_JS_FILES = yt_dlp_ejs/yt/solver/core.min.js yt_dlp_ejs/yt/solver/lib.min.js
+
+yt-dlp-extra: current-ejs-version .ejs-$(EJS_VERSION) $(EJS_PY_FILES) $(EJS_JS_FILES) yt-dlp.zip
+    mkdir -p zip
+    for d in $(EJS_PY_FOLDERS) ; do \
+        mkdir -p zip/$$d ;\
+        cp -pPR $$d/*.py zip/$$d/ ;\
+    done
+    for d in $(EJS_JS_FOLDERS) ; do \
+        mkdir -p zip/$$d ;\
+        cp -pPR $$d/*.js zip/$$d/ ;\
+    done
+    (cd zip && touch -t 200001010101 $(EJS_PY_FILES) $(EJS_JS_FILES))
+    (cd zip && zip -q ../yt-dlp.zip $(EJS_PY_FILES) $(EJS_JS_FILES))
+    cp -pP yt_dlp/__main__.py zip/
+    touch -t 200001010101 zip/__main__.py
+    (cd zip && zip -q ../yt-dlp.zip __main__.py)
+    echo '#!$(PYTHON)' > yt-dlp
+    cat yt-dlp.zip >> yt-dlp
+    rm yt-dlp.zip
+    chmod a+x yt-dlp
+    rm -rf zip
+
+.ejs-$(EJS_VERSION):
+    @echo Downloading yt-dlp-ejs
+    @echo "yt-dlp-ejs==$(EJS_VERSION) --hash $(EJS_WHEEL_HASH)" > .ejs-requirements.txt
+    $(PYTHON) -m pip download -d ./build --no-deps --require-hashes -r .ejs-requirements.txt
+    unzip -o build/$(EJS_WHEEL_NAME) "yt_dlp_ejs/*"
+    @touch .ejs-$(EJS_VERSION)
+
+current-ejs-version:
+    rm -rf .ejs-*
+    touch .ejs-$$($(PYTHON) -c 'import sys; sys.path = [""]; from yt_dlp_ejs import version; print(version)' 2>/dev/null)
4473
THIRD_PARTY_LICENSES.txt
Normal file
4473
THIRD_PARTY_LICENSES.txt
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,178 @@
services:
  static:
    build: static

  linux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
    environment:
      channel: ${channel}
      origin: ${origin}
      version: ${version}
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ~/build:/build
      - ../..:/yt-dlp

  linux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  linux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  linux_armv7l:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm/v7"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp
      - ../../venv:/yt-dlp-build-venv

  linux_armv7l_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm/v7"
      args:
        VERIFYIMAGE: arm32v7/debian:bullseye
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_x86_64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/amd64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../..:/yt-dlp

  musllinux_x86_64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/amd64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build

  musllinux_aarch64:
    build:
      context: linux
      target: build
      platforms:
        - "linux/arm64"
      args:
        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
    environment:
      EXE_NAME: ${EXE_NAME:?}
      CHANNEL: ${CHANNEL:?}
      ORIGIN: ${ORIGIN:?}
      VERSION:
      PYTHON_VERSION:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
      EXCLUDE_CURL_CFFI: "1"
    volumes:
      - ../..:/yt-dlp

  musllinux_aarch64_verify:
    build:
      context: linux
      target: verify
      platforms:
        - "linux/arm64"
      args:
        VERIFYIMAGE: alpine:3.22
    environment:
      EXE_NAME: ${EXE_NAME:?}
      UPDATE_TO:
      SKIP_ONEDIR_BUILD:
      SKIP_ONEFILE_BUILD:
    volumes:
      - ../../dist:/build
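One plausible way to drive these services (the compose file itself does not prescribe an invocation, and the `EXE_NAME` value below is a placeholder):

    # from bundle/docker/, with the required ${VAR:?} variables exported
    export EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp
    docker compose run --build linux_x86_64          # build stage
    docker compose run --build linux_x86_64_verify   # verify stage, mounts ../../dist as /build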
16  bundle/docker/linux/Dockerfile  Normal file
@@ -0,0 +1,16 @@
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
ARG VERIFYIMAGE=alpine:3.22


FROM $BUILDIMAGE AS build

WORKDIR /yt-dlp
COPY build.sh /build.sh
ENTRYPOINT ["/build.sh"]


FROM $VERIFYIMAGE AS verify

WORKDIR /testing
COPY verify.sh /verify.sh
ENTRYPOINT ["/verify.sh"]
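The two stages can also be built directly with plain `docker build`, which is presumably what the compose `target:` keys above resolve to; the image tags here are illustrative:

    docker build --target build -t yt-dlp-linux-build bundle/docker/linux
    docker build --target verify -t yt-dlp-linux-verify bundle/docker/linux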
48  bundle/docker/linux/build.sh  Executable file
@@ -0,0 +1,48 @@
#!/bin/bash
set -exuo pipefail

if [[ -z "${PYTHON_VERSION:-}" ]]; then
    PYTHON_VERSION="3.13"
    echo "Defaulting to using Python ${PYTHON_VERSION}"
fi

function runpy {
    "/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
}

function venvpy {
    "python${PYTHON_VERSION}" "$@"
}

INCLUDES=(
    --include-group pyinstaller
    --include-group secretstorage
)

if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
    INCLUDES+=(--include-group curl-cffi)
fi

runpy -m venv /yt-dlp-build-venv
# shellcheck disable=SC1091
source /yt-dlp-build-venv/bin/activate
# Inside the venv we use venvpy instead of runpy
venvpy -m ensurepip --upgrade --default-pip
venvpy -m devscripts.install_deps --only-optional-groups --include-group build
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
venvpy -m devscripts.make_lazy_extractors
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"

if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
    mkdir -p /build
    venvpy -m bundle.pyinstaller --onedir --distpath=/build
    pushd "/build/${EXE_NAME}"
    chmod +x "${EXE_NAME}"
    venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
    popd
fi

if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
    venvpy -m bundle.pyinstaller
    chmod +x "./dist/${EXE_NAME}"
fi
51  bundle/docker/linux/verify.sh  Executable file
@@ -0,0 +1,51 @@
#!/bin/sh
set -eu

if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
    if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
        echo "All executable builds were skipped"
        exit 1
    fi
    echo "Extracting zip to verify onedir build"
    if command -v python3 >/dev/null 2>&1; then
        python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
    else
        echo "Attempting to install unzip"
        if command -v dnf >/dev/null 2>&1; then
            dnf -y install --allowerasing unzip
        elif command -v yum >/dev/null 2>&1; then
            yum -y install unzip
        elif command -v apt-get >/dev/null 2>&1; then
            DEBIAN_FRONTEND=noninteractive apt-get update -qq
            DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
        elif command -v apk >/dev/null 2>&1; then
            apk add --no-cache unzip
        else
            echo "Unsupported image"
            exit 1
        fi
        unzip "/build/${EXE_NAME}.zip" -d ./
    fi
    chmod +x "./${EXE_NAME}"
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

echo "Verifying onefile build"
cp "/build/${EXE_NAME}" ./
chmod +x "./${EXE_NAME}"

if [ -z "${UPDATE_TO:-}" ]; then
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
    exit 1
fi
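The update check at the end reduces to "the reported version must change across `--update-to`"; a minimal sketch of the same logic, with a placeholder binary name and target tag:

    cp ./yt-dlp_linux ./yt-dlp_linux_downgraded
    old_version="$(./yt-dlp_linux --version)"
    ./yt-dlp_linux_downgraded -v --update-to "stable@2025.01.01"
    [ "$old_version" != "$(./yt-dlp_linux_downgraded --version)" ] || exit 1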
@@ -1,21 +0,0 @@
FROM alpine:3.19 as base

RUN apk --update add --no-cache \
    build-base \
    python3 \
    pipx \
    ;

RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
    scons \
    patchelf \
    binutils \
    ;
RUN pipx install staticx

WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT /entrypoint.sh
@@ -1,13 +0,0 @@
#!/bin/ash
set -e

source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps --include secretstorage
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate

source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate
@@ -1,59 +0,0 @@
#!/usr/bin/env python3

# Allow execution from anywhere
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import warnings

from py2exe import freeze

from devscripts.utils import read_version

VERSION = read_version()


def main():
    warnings.warn(
        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
        'It is recommended to run "pyinst.py" to build using pyinstaller instead')

    freeze(
        console=[{
            'script': './yt_dlp/__main__.py',
            'dest_base': 'yt-dlp',
            'icon_resources': [(1, 'devscripts/logo.ico')],
        }],
        version_info={
            'version': VERSION,
            'description': 'A feature-rich command-line audio/video downloader',
            'comments': 'Official repository: <https://github.com/yt-dlp/yt-dlp>',
            'product_name': 'yt-dlp',
            'product_version': VERSION,
        },
        options={
            'bundle_files': 0,
            'compressed': 1,
            'optimize': 2,
            'dist_dir': './dist',
            'excludes': [
                # py2exe cannot import Crypto
                'Crypto',
                'Cryptodome',
                # py2exe appears to confuse this with our socks library.
                # We don't use pysocks and urllib3.contrib.socks would fail to import if tried.
                'urllib3.contrib.socks'
            ],
            'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
            # Modules that are only imported dynamically must be added here
            'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
                         'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
        },
        zipfile=None,
    )


if __name__ == '__main__':
    main()
@@ -13,6 +13,8 @@
from devscripts.utils import read_version

OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
    OS_NAME = 'musllinux'
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
    MACHINE = 'x86' if ARCH == '32' else ''
@@ -36,6 +38,9 @@ def main():
        f'--name={name}',
        '--icon=devscripts/logo.ico',
        '--upx-exclude=vcruntime140.dll',
        # Ref: https://github.com/yt-dlp/yt-dlp/issues/13311
        #      https://github.com/pyinstaller/pyinstaller/issues/9149
        '--exclude-module=pkg_resources',
        '--noconfirm',
        '--additional-hooks-dir=yt_dlp/__pyinstaller',
        *opts,
@@ -59,16 +64,22 @@ def parse_options():

def exe(onedir):
    """@returns (name, path)"""
    platform_name, machine, extension = {
        'win32': (None, MACHINE, '.exe'),
        'darwin': ('macos', None, None),
    }.get(OS_NAME, (OS_NAME, MACHINE, None))

    name = '_'.join(filter(None, (
        'yt-dlp',
        {'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
        MACHINE,
        platform_name,
        machine,
    )))

    return name, ''.join(filter(None, (
        'dist/',
        onedir and f'{name}/',
        name,
        OS_NAME == 'win32' and '.exe'
        extension,
    )))
@@ -113,18 +124,17 @@ def windows_set_version(exe, version):
        ),
        kids=[
            StringFileInfo([StringTable('040904B0', [
                StringStruct('Comments', 'yt-dlp%s Command Line Interface' % suffix),
                StringStruct('Comments', f'yt-dlp{suffix} Command Line Interface'),
                StringStruct('CompanyName', 'https://github.com/yt-dlp'),
                StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
                StringStruct('FileVersion', version),
                StringStruct('InternalName', f'yt-dlp{suffix}'),
                StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
                StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
                StringStruct('ProductName', f'yt-dlp{suffix}'),
                StringStruct(
                    'ProductVersion', f'{version}{suffix} on Python {platform.python_version()}'),
            ])]), VarFileInfo([VarStruct('Translation', [0, 1200])])
        ]
            ])]), VarFileInfo([VarStruct('Translation', [0, 1200])]),
        ],
    ))
@@ -6,13 +6,17 @@ __yt_dlp()
    prev="${COMP_WORDS[COMP_CWORD-1]}"
    opts="{{flags}}"
    keywords=":ytfavorites :ytrecommended :ytsubscriptions :ytwatchlater :ythistory"
    fileopts="-a|--batch-file|--download-archive|--cookies|--load-info"
    fileopts="-a|--batch-file|--download-archive|--cookies|--load-info-json"
    diropts="--cache-dir"

    if [[ ${prev} =~ ${fileopts} ]]; then
        local IFS=$'\n'
        type compopt &>/dev/null && compopt -o filenames
        COMPREPLY=( $(compgen -f -- ${cur}) )
        return 0
    elif [[ ${prev} =~ ${diropts} ]]; then
        local IFS=$'\n'
        type compopt &>/dev/null && compopt -o dirnames
        COMPREPLY=( $(compgen -d -- ${cur}) )
        return 0
    fi
@@ -9,8 +9,8 @@

import yt_dlp

BASH_COMPLETION_FILE = "completions/bash/yt-dlp"
BASH_COMPLETION_TEMPLATE = "devscripts/bash-completion.in"
BASH_COMPLETION_FILE = 'completions/bash/yt-dlp'
BASH_COMPLETION_TEMPLATE = 'devscripts/bash-completion.in'


def build_completion(opt_parser):
@@ -21,9 +21,9 @@ def build_completion(opt_parser):
        opts_flag.append(option.get_opt_string())
    with open(BASH_COMPLETION_TEMPLATE) as f:
        template = f.read()
    with open(BASH_COMPLETION_FILE, "w") as f:
    with open(BASH_COMPLETION_FILE, 'w') as f:
        # just using the special char
        filled_template = template.replace("{{flags}}", " ".join(opts_flag))
        filled_template = template.replace('{{flags}}', ' '.join(opts_flag))
        f.write(filled_template)
@@ -169,5 +169,144 @@
        "when": "5c019f6328ad40d66561eac3c4de0b3cd070d0f6",
        "short": "[cleanup] Misc (#9765)",
        "authors": ["bashonly", "Grub4K", "seproDev"]
    },
    {
        "action": "change",
        "when": "e6a22834df1776ec4e486526f6df2bf53cb7e06f",
        "short": "[ie/orf:on] Add `prefer_segments_playlist` extractor-arg (#10314)",
        "authors": ["seproDev"]
    },
    {
        "action": "add",
        "when": "6aaf96a3d6e7d0d426e97e11a2fcf52fda00e733",
        "short": "[priority] Security: [[CVE-2024-38519](https://nvd.nist.gov/vuln/detail/CVE-2024-38519)] [Properly sanitize file-extension to prevent file system modification and RCE](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-79w7-vh3h-8g4j)\n - Unsafe extensions are now blocked from being downloaded"
    },
    {
        "action": "add",
        "when": "6075a029dba70a89675ae1250e7cdfd91f0eba41",
        "short": "[priority] Security: [[ie/douyutv] Do not use dangerous javascript source/URL](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-3v33-3wmw-3785)\n - A dependency on potentially malicious third-party JavaScript code has been removed from the Douyu extractors"
    },
    {
        "action": "add",
        "when": "fb8b7f226d251e521a89b23c415e249e5b788e5c",
        "short": "[priority] **The minimum *recommended* Python version has been raised to 3.9**\nSince Python 3.8 will reach end-of-life in October 2024, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
    },
    {
        "action": "change",
        "when": "b31b81d85f00601710d4fac590c3e4efb4133283",
        "short": "[ci] Rerun failed tests (#11143)",
        "authors": ["Grub4K"]
    },
    {
        "action": "add",
        "when": "a886cf3e900f4a2ec00af705f883539269545609",
        "short": "[priority] **py2exe is no longer supported**\nThis release's `yt-dlp_min.exe` will be the last, and it's actually a PyInstaller-bundled executable so that yt-dlp users updating their py2exe build with `-U` will be automatically migrated. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10087)"
    },
    {
        "action": "add",
        "when": "a886cf3e900f4a2ec00af705f883539269545609",
        "short": "[priority] **Following this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
    },
    {
        "action": "add",
        "when": "87884f15580910e4e0fe0e1db73508debc657471",
        "short": "[priority] **Beginning with this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
    },
    {
        "action": "add",
        "when": "d784464399b600ba9516bbcec6286f11d68974dd",
        "short": "[priority] **The minimum *required* Python version has been raised to 3.9**\nPython 3.8 reached its end-of-life on 2024.10.07, and yt-dlp has now removed support for it. As an unfortunate side effect, the official `yt-dlp.exe` and `yt-dlp_x86.exe` binaries are no longer supported on Windows 7. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
    },
    {
        "action": "change",
        "when": "914af9a0cf51c9a3f74aa88d952bee8334c67511",
        "short": "Expand paths in `--plugin-dirs` (#11334)",
        "authors": ["bashonly"]
    },
    {
        "action": "change",
        "when": "c29f5a7fae93a08f3cfbb6127b2faa75145b06a0",
        "short": "[ie/generic] Do not impersonate by default (#11336)",
        "authors": ["bashonly"]
    },
    {
        "action": "change",
        "when": "57212a5f97ce367590aaa5c3e9a135eead8f81f7",
        "short": "[ie/vimeo] Fix API retries (#11351)",
        "authors": ["bashonly"]
    },
    {
        "action": "add",
        "when": "52c0ffe40ad6e8404d93296f575007b05b04c686",
        "short": "[priority] **Login with OAuth is no longer supported for YouTube**\nDue to a change made by the site, yt-dlp is no longer able to support OAuth login for YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/11462#issuecomment-2471703090)"
    },
    {
        "action": "change",
        "when": "76ac023ff02f06e8c003d104f02a03deeddebdcd",
        "short": "[ie/youtube:tab] Improve shorts title extraction (#11997)",
        "authors": ["bashonly", "d3d9"]
    },
    {
        "action": "add",
        "when": "88eb1e7a9a2720ac89d653c0d0e40292388823bb",
        "short": "[priority] **New option `--preset-alias`/`-t` has been added**\nThis provides convenient predefined aliases for common use cases. Available presets include `mp4`, `mp3`, `mkv`, `aac`, and `sleep`. See [the README](https://github.com/yt-dlp/yt-dlp/blob/master/README.md#preset-aliases) for more details."
    },
    {
        "action": "remove",
        "when": "d596824c2f8428362c072518856065070616e348"
    },
    {
        "action": "remove",
        "when": "7b81634fb1d15999757e7a9883daa6ef09ea785b"
    },
    {
        "action": "remove",
        "when": "500761e41acb96953a5064e951d41d190c287e46"
    },
    {
        "action": "add",
        "when": "f3008bc5f89d2691f2f8dfc51b406ef4e25281c3",
        "short": "[priority] **Default behaviour changed from `--mtime` to `--no-mtime`**\nyt-dlp no longer applies the server modified time to downloaded files by default. [Read more](https://github.com/yt-dlp/yt-dlp/issues/12780)"
    },
    {
        "action": "add",
        "when": "959ac99e98c3215437e573c22d64be42d361e863",
        "short": "[priority] Security: [[CVE-2025-54072](https://nvd.nist.gov/vuln/detail/CVE-2025-54072)] [Fix `--exec` placeholder expansion on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-45hg-7f49-5h56)\n - When `--exec` is used on Windows, the filepath expanded from `{}` (or the default placeholder) is now properly escaped"
    },
    {
        "action": "change",
        "when": "b831406a1d3be34c159835079d12bae624c43610",
        "short": "[ie/rtve.es:program] Add extractor (#12955)",
        "authors": ["meGAmeS1", "seproDev"]
    },
    {
        "action": "add",
        "when": "23c658b9cbe34a151f8f921ab1320bb5d4e40a4d",
        "short": "[priority] **The minimum *recommended* Python version has been raised to 3.10**\nSince Python 3.9 will reach end-of-life in October 2025, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
    },
    {
        "action": "add",
        "when": "cc5a5caac5fbc0d605b52bde0778d6fd5f97b5ab",
        "short": "[priority] **darwin_legacy_exe builds are being discontinued**\nThis release's `yt-dlp_macos_legacy` binary will likely be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13856)"
    },
    {
        "action": "add",
        "when": "c76ce28e06c816eb5b261dfb6aff6e69dd9b7382",
        "short": "[priority] **linux_armv7l_exe builds are being discontinued**\nThis release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)"
    },
    {
        "action": "add",
        "when": "08d78996831bd8e1e3c2592d740c3def00bbf548",
        "short": "[priority] **Several options have been deprecated**\nIn order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)"
    },
    {
        "action": "add",
        "when": "4e6a693057cfaf1ce1f07b019ed3bfce2bf936f6",
        "short": "[priority] **The minimum *required* Python version has been raised to 3.10**\nPython 3.9 has reached its end-of-life as of October 2025, and yt-dlp has now removed support for it. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
    },
    {
        "action": "add",
        "when": "2c9091e355a7ba5d1edb69796ecdca48199b77fb",
        "short": "[priority] **A stopgap release with a *TEMPORARY partial* fix for YouTube support**\nSome formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)"
    }
]
3  devscripts/cli_to_api.py  Normal file → Executable file
@@ -1,3 +1,5 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
@@ -18,6 +20,7 @@ def parse_patched_options(opts):
        'fragment_retries': 0,
        'extract_flat': False,
        'concat_playlist': 'never',
        'update_self': False,
    })
    yt_dlp.options.create_parser = lambda: patched_parser
    try:
@@ -11,13 +11,12 @@
import subprocess

from yt_dlp.aes import aes_encrypt, key_expansion
from yt_dlp.utils import intlist_to_bytes

secret_msg = b'Secret message goes here'


def hex_str(int_list):
    return codecs.encode(intlist_to_bytes(int_list), 'hex')
    return codecs.encode(bytes(int_list), 'hex')


def openssl_encode(algo, key, iv):
329  devscripts/generate_third_party_licenses.py  Normal file
@@ -0,0 +1,329 @@
import requests
from dataclasses import dataclass
from pathlib import Path
import hashlib

DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
CACHE_LOCATION = '.license_cache'
HEADER = '''THIRD-PARTY LICENSES

This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
Source code for bundled third-party components is available from the original projects.
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''


@dataclass(frozen=True)
class Dependency:
    name: str
    license_url: str
    project_url: str = ''
    license: str = ''
    comment: str = ''


DEPENDENCIES: list[Dependency] = [
    # Core runtime environment components
    Dependency(
        name='Python',
        license='PSF-2.0',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
        project_url='https://www.python.org/',
    ),
    Dependency(
        name='Microsoft Distributable Code',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
        comment='Only included in Windows builds',
    ),
    Dependency(
        name='bzip2',
        license='bzip2-1.0.6',
        license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
        project_url='https://sourceware.org/bzip2/',
    ),
    Dependency(
        name='libffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
        project_url='https://sourceware.org/libffi/',
    ),
    Dependency(
        name='OpenSSL 3.0+',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
        project_url='https://www.openssl.org/',
    ),
    Dependency(
        name='SQLite',
        license='Public Domain',  # Technically does not need to be included
        license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
        project_url='https://www.sqlite.org/',
    ),
    Dependency(
        name='liblzma',
        license='0BSD',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
        project_url='https://tukaani.org/xz/',
    ),
    Dependency(
        name='mpdecimal',
        license='BSD-2-Clause',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
        project_url='https://www.bytereef.org/mpdecimal/',
    ),
    Dependency(
        name='zlib',
        license='zlib',
        license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
        project_url='https://zlib.net/',
    ),
    Dependency(
        name='Expat',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
        project_url='https://libexpat.github.io/',
    ),
    Dependency(
        name='ncurses',
        license='X11-distribute-modifications-variant',
        license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
        comment='Only included in Linux/macOS builds',
        project_url='https://invisible-island.net/ncurses/',
    ),
    Dependency(
        name='GNU Readline',
        license='GPL-3.0-or-later',
        license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
        comment='Only included in Linux builds',
        project_url='https://www.gnu.org/software/readline/',
    ),
    Dependency(
        name='libstdc++',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
    ),
    Dependency(
        name='libgcc',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/',
    ),
    Dependency(
        name='libuuid',
        license='BSD-3-Clause',
        license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
        comment='Only included in Linux builds',
        project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
    ),
    Dependency(
        name='libintl',
        license='LGPL-2.1-or-later',
        license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/gettext/',
    ),
    Dependency(
        name='libidn2',
        license='LGPL-3.0-or-later',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libidn2 (Unicode character data files)',
        license='Unicode-TOU AND Unicode-DFS-2016',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libunistring',
        license='LGPL-3.0-or-later',
        license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libunistring/',
    ),
    Dependency(
        name='librtmp',
        license='LGPL-2.1-or-later',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
        comment='Only included in macOS builds',
        project_url='https://rtmpdump.mplayerhq.hu/',
    ),
    Dependency(
        name='zstd',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
        comment='Only included in macOS builds',
        project_url='https://facebook.github.io/zstd/',
    ),

    # Python packages
    Dependency(
        name='brotli',
        license='MIT',
        license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
        project_url='https://brotli.org/',
    ),
    Dependency(
        name='curl_cffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://curl-cffi.readthedocs.io/',
    ),
    # Dependency of curl_cffi
    Dependency(
        name='curl-impersonate',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://github.com/lexiforest/curl-impersonate',
    ),
    Dependency(
        name='cffi',
        license='MIT-0',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
        project_url='https://cffi.readthedocs.io/',
    ),
    # Dependency of cffi
    Dependency(
        name='pycparser',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
        project_url='https://github.com/eliben/pycparser',
    ),
    Dependency(
        name='mutagen',
        license='GPL-2.0-or-later',
        license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
        project_url='https://mutagen.readthedocs.io/',
    ),
    Dependency(
        name='PyCryptodome',
        license='Public Domain and BSD-2-Clause',
        license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
        project_url='https://www.pycryptodome.org/',
    ),
    Dependency(
        name='certifi',
        license='MPL-2.0',
        license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
        project_url='https://github.com/certifi/python-certifi',
    ),
    Dependency(
        name='requests',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
        project_url='https://requests.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='charset-normalizer',
        license='MIT',
        license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
        project_url='https://charset-normalizer.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='idna',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
        project_url='https://github.com/kjd/idna',
    ),
    Dependency(
        name='urllib3',
        license='MIT',
        license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
        project_url='https://urllib3.readthedocs.io/',
    ),
    Dependency(
        name='SecretStorage',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://secretstorage.readthedocs.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='cryptography',
        license='Apache-2.0',  # Also available as BSD-3-Clause
        license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
        comment='Only included in Linux builds',
        project_url='https://cryptography.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='Jeepney',
        license='MIT',
        license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://jeepney.readthedocs.io/',
    ),
    Dependency(
        name='websockets',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
        project_url='https://websockets.readthedocs.io/',
    ),
    # Dependencies of yt-dlp-ejs
    Dependency(
        name='Meriyah',
        license='ISC',
        license_url='https://raw.githubusercontent.com/meriyah/meriyah/refs/heads/main/LICENSE.md',
        project_url='https://github.com/meriyah/meriyah',
    ),
    Dependency(
        name='Astring',
        license='MIT',
        license_url='https://raw.githubusercontent.com/davidbonnet/astring/refs/heads/main/LICENSE',
        project_url='https://github.com/davidbonnet/astring/',
    ),
]


def fetch_text(dep: Dependency) -> str:
    cache_dir = Path(CACHE_LOCATION)
    cache_dir.mkdir(exist_ok=True)
    url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
    cache_file = cache_dir / f'{url_hash}.txt'

    if cache_file.exists():
        return cache_file.read_text()

    # UA needed since some domains block requests' default UA
    req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
    req.raise_for_status()
    text = req.text
    cache_file.write_text(text)
    return text


def build_output() -> str:
    lines = [HEADER]
    for d in DEPENDENCIES:
        lines.append('\n')
        lines.append('-' * 80)
        header = f'{d.name}'
        if d.license:
            header += f' | {d.license}'
        if d.comment:
            header += f'\nNote: {d.comment}'
        if d.project_url:
            header += f'\nURL: {d.project_url}'
        lines.append(header)
        lines.append('-' * 80)

        text = fetch_text(d)
        lines.append(text.strip('\n') + '\n')
    return '\n'.join(lines)


if __name__ == '__main__':
    content = build_output()
    Path(DEFAULT_OUTPUT).write_text(content)
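Presumed invocation, from the repository root; fetched texts are cached under `.license_cache/` keyed by the SHA-256 of each license URL, so repeated runs need no network access:

    python3 devscripts/generate_third_party_licenses.py
    head -n 6 THIRD_PARTY_LICENSES.txt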
@@ -22,14 +22,19 @@ def parse_args():
        'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml',
        help='input file (default: %(default)s)')
    parser.add_argument(
        '-e', '--exclude', metavar='DEPENDENCY', action='append',
        help='exclude a dependency')
        '-e', '--exclude-dependency', metavar='DEPENDENCY', action='append',
        help='exclude a dependency (can be used multiple times)')
    parser.add_argument(
        '-i', '--include', metavar='GROUP', action='append',
        help='include an optional dependency group')
        '-i', '--include-group', metavar='GROUP', action='append',
        help='include an optional dependency group (can be used multiple times)')
    parser.add_argument(
        '-o', '--only-optional', action='store_true',
        help='only install optional dependencies')
        '-c', '--cherry-pick', metavar='DEPENDENCY', action='append',
        help=(
            'only include a specific dependency from the resulting dependency list '
            '(can be used multiple times)'))
    parser.add_argument(
        '-o', '--only-optional-groups', action='store_true',
        help='omit default dependencies unless the "default" group is specified with --include-group')
    parser.add_argument(
        '-p', '--print', action='store_true',
        help='only print requirements to stdout')
@@ -39,30 +44,41 @@ def parse_args():
    return parser.parse_args()


def uniq(arg) -> dict[str, None]:
    return dict.fromkeys(map(str.lower, arg or ()))


def main():
    args = parse_args()
    project_table = parse_toml(read_file(args.input))['project']
    recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<group_name>[\w-]+)\]')
    optional_groups = project_table['optional-dependencies']
    excludes = args.exclude or []

    excludes = uniq(args.exclude_dependency)
    only_includes = uniq(args.cherry_pick)
    include_groups = uniq(args.include_group)

    def yield_deps(group):
        for dep in group:
            if mobj := recursive_pattern.fullmatch(dep):
                yield from optional_groups.get(mobj.group('group_name'), [])
                yield from optional_groups.get(mobj.group('group_name'), ())
            else:
                yield dep

    targets = []
    if not args.only_optional:  # `-o` should exclude 'dependencies' and the 'default' group
        targets.extend(project_table['dependencies'])
        if 'default' not in excludes:  # `--exclude default` should exclude entire 'default' group
            targets.extend(yield_deps(optional_groups['default']))
    targets = {}
    if not args.only_optional_groups:
        # legacy: 'dependencies' is empty now
        targets.update(dict.fromkeys(project_table['dependencies']))
        targets.update(dict.fromkeys(yield_deps(optional_groups['default'])))

    for include in filter(None, map(optional_groups.get, args.include or [])):
        targets.extend(yield_deps(include))
    for include in filter(None, map(optional_groups.get, include_groups)):
        targets.update(dict.fromkeys(yield_deps(include)))

    targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes]
    def target_filter(target):
        name = re.match(r'[\w-]+', target).group(0).lower()
        return name not in excludes and (not only_includes or name in only_includes)

    targets = list(filter(target_filter, targets))

    if args.print:
        for target in targets:
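The renamed flags in use; the first two invocations appear verbatim in bundle/docker/linux/build.sh above, while the `--cherry-pick` example is only illustrative:

    python3 -m devscripts.install_deps --only-optional-groups --include-group build
    python3 -m devscripts.install_deps --include-group pyinstaller --include-group secretstorage
    python3 -m devscripts.install_deps --cherry-pick requests --print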
@@ -71,14 +71,13 @@ def group_lookup(cls):
    def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
        group, _, subgroup = (group.strip().lower() for group in value.partition('/'))

        result = cls.group_lookup().get(group)
        if not result:
            if subgroup:
                return None, value
            subgroup = group
            result = cls.subgroup_lookup().get(subgroup)
        if result := cls.group_lookup().get(group):
            return result, subgroup or None

        return result, subgroup or None
        if subgroup:
            return None, value

        return cls.subgroup_lookup().get(group), group or None


@dataclass
@@ -136,8 +135,7 @@ def _format_groups(self, groups):
                first = False
                yield '\n<details><summary><h3>Changelog</h3></summary>\n'

            group = groups[item]
            if group:
            if group := groups[item]:
                yield self.format_module(item.value, group)

        if self._collapsible:
@@ -223,10 +221,10 @@ def format_single_change(self, info: CommitInfo):

        return message if not sep else f'{message}{sep}{rest}'

    def _format_message_link(self, message, hash):
        assert message or hash, 'Improperly defined commit message or override'
        message = message if message else hash[:HASH_LENGTH]
        return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message
    def _format_message_link(self, message, commit_hash):
        assert message or commit_hash, 'Improperly defined commit message or override'
        message = message if message else commit_hash[:HASH_LENGTH]
        return f'[{message}]({self.repo_url}/commit/{commit_hash})' if commit_hash else message

    def _format_issues(self, issues):
        return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)
@@ -253,7 +251,7 @@ class CommitRange:
    ''', re.VERBOSE | re.DOTALL)
    EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
    REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
    FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert|Improve)\s+([\da-f]{40})')
    FIXES_RE = re.compile(r'(?i:(?:bug\s*)?fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Improve)\s+([\da-f]{40})')
    UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')

    def __init__(self, start, end, default_author=None):
@@ -287,11 +285,16 @@ def _get_commits_and_fixes(self, default_author):
            short = next(lines)
            skip = short.startswith('Release ') or short == '[version] update'

            fix_commitish = None
            if match := self.FIXES_RE.search(short):
                fix_commitish = match.group(1)

            authors = [default_author] if default_author else []
            for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
                match = self.AUTHOR_INDICATOR_RE.match(line)
                if match:
                if match := self.AUTHOR_INDICATOR_RE.match(line):
                    authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)
                if not fix_commitish and (match := self.FIXES_RE.fullmatch(line)):
                    fix_commitish = match.group(1)

            commit = Commit(commit_hash, short, authors)
            if skip and (self._start or not i):
@@ -301,21 +304,17 @@ def _get_commits_and_fixes(self, default_author):
                logger.debug(f'Reached Release commit, breaking: {commit}')
                break

            revert_match = self.REVERT_RE.fullmatch(commit.short)
            if revert_match:
                reverts[revert_match.group(1)] = commit
            if match := self.REVERT_RE.fullmatch(commit.short):
                reverts[match.group(1)] = commit
                continue

            fix_match = self.FIXES_RE.search(commit.short)
            if fix_match:
                commitish = fix_match.group(1)
                fixes[commitish].append(commit)
            if fix_commitish:
                fixes[fix_commitish].append(commit)

            commits[commit.hash] = commit

        for commitish, revert_commit in reverts.items():
            reverted = commits.pop(commitish, None)
            if reverted:
            if reverted := commits.pop(commitish, None):
                logger.debug(f'{commitish} fully reverted {reverted}')
            else:
                commits[revert_commit.hash] = revert_commit
@@ -356,7 +355,7 @@ def apply_overrides(self, overrides):
                logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
            self._commits[commit.hash] = commit

        self._commits = {key: value for key, value in reversed(self._commits.items())}
        self._commits = dict(reversed(self._commits.items()))

    def groups(self):
        group_dict = defaultdict(list)
@@ -374,7 +373,7 @@ def groups(self):
            issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []

            if prefix:
                groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
                groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')), strict=True)
                group = next(iter(filter(None, groups)), None)
                details = ', '.join(unique(details))
                sub_details = list(itertools.chain.from_iterable(sub_details))
@@ -461,8 +460,7 @@ def create_changelog(args):

    logger.info(f'Loaded {len(commits)} commits')

    new_contributors = get_new_contributors(args.contributors_path, commits)
    if new_contributors:
    if new_contributors := get_new_contributors(args.contributors_path, commits):
        if args.contributors:
            write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
        logger.info(f'New contributors: {", ".join(new_contributors)}')
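The tightened `FIXES_RE` no longer claims revert commits (those remain with `REVERT_RE`) but now also matches "Bugfix"-style wording; a quick sanity check against a dummy 40-hex hash:

    python3 -c "import re; p = re.compile(r'(?i:(?:bug\s*)?fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Improve)\s+([\da-f]{40})'); h = 'a' * 40; print(bool(p.search(f'Bugfix for {h}')), bool(p.search(f'Revert {h}')))"  # prints: True False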
@@ -8,7 +8,7 @@ def main():
        return  # This is unused in yt-dlp

    parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
    options, args = parser.parse_args()
    _, args = parser.parse_args()
    if len(args) != 2:
        parser.error('Expected an input and an output filename')
@@ -11,11 +11,13 @@

from devscripts.utils import get_filename_args, read_file, write_file

VERBOSE_TMPL = '''
VERBOSE = '''
- type: checkboxes
  id: verbose
  attributes:
    label: Provide verbose output that clearly demonstrates the problem
    description: |
      This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
    options:
      - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
        required: true
@@ -32,14 +34,15 @@
      placeholder: |
        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
        [debug] Proxy map: {}
        [debug] Request Handlers: urllib, requests
        [debug] Loaded 1893 extractors
        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
        [debug] Loaded 1838 extractors
        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
        <more lines>
@@ -49,20 +52,20 @@
'''.strip()

NO_SKIP = '''
- type: checkboxes
- type: markdown
  attributes:
    label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
    description: Fill all fields even if you think it is irrelevant for the issue
    options:
      - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\\* field
        required: true
    value: |
      > [!IMPORTANT]
      > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
'''.strip()


def main():
    fields = {'no_skip': NO_SKIP}
    fields['verbose'] = VERBOSE_TMPL % fields
    fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])
    fields = {
        'no_skip': NO_SKIP,
        'verbose': VERBOSE,
        'verbose_optional': re.sub(r'(\n\s+validations:)?\n\s+required: true', '', VERBOSE),
    }

    infile, outfile = get_filename_args(has_infile=True)
    write_file(outfile, read_file(infile) % fields)
@@ -2,7 +2,6 @@

# Allow direct execution
import os
import shutil
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -11,6 +10,9 @@
from inspect import getsource

from devscripts.utils import get_filename_args, read_file, write_file
from yt_dlp.extractor import import_extractors
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
from yt_dlp.globals import extractors

NO_ATTR = object()
STATIC_CLASS_PROPERTIES = [
@@ -34,17 +36,12 @@ class {name}({bases}):


def main():
    os.environ['YTDLP_NO_PLUGINS'] = 'true'
    os.environ['YTDLP_NO_LAZY_EXTRACTORS'] = 'true'

    lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
    if os.path.exists(lazy_extractors_filename):
        os.remove(lazy_extractors_filename)

    _ALL_CLASSES = get_all_ies()  # Must be before import

    import yt_dlp.plugins
    from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor

    # Filter out plugins
    _ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
    import_extractors()

    DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
    module_src = '\n'.join((
@@ -52,26 +49,12 @@ def main():
        '    _module = None',
        *extra_ie_code(DummyInfoExtractor),
        '\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n    pass\n',
        *build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
        *build_ies(list(extractors.value.values()), (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
    ))

    write_file(lazy_extractors_filename, f'{module_src}\n')


def get_all_ies():
    PLUGINS_DIRNAME = 'ytdlp_plugins'
    BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
    if os.path.exists(PLUGINS_DIRNAME):
        # os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
        shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
    try:
        from yt_dlp.extractor.extractors import _ALL_CLASSES
    finally:
        if os.path.exists(BLOCKED_DIRNAME):
            shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
    return _ALL_CLASSES


def extra_ie_code(ie, base=None):
    for var in STATIC_CLASS_PROPERTIES:
        val = getattr(ie, var)
@@ -92,7 +75,7 @@ def build_ies(ies, bases, attr_base):
        if ie in ies:
            names.append(ie.__name__)

    yield f'\n_ALL_CLASSES = [{", ".join(names)}]'
    yield '\n_CLASS_LOOKUP = {%s}' % ', '.join(f'{name!r}: {name}' for name in names)


def sort_ies(ies, ignored_bases):
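Regeneration now goes through `import_extractors()` and the `extractors` registry instead of the removed `get_all_ies()` plugin workaround; the default output path is the `default_outfile` shown above:

    python3 devscripts/make_lazy_extractors.py
    head -n 3 yt_dlp/extractor/lazy_extractors.py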
@@ -51,7 +51,7 @@ def apply_patch(text, patch):
    ),
    (  # Headings
        r'(?m)^ (\w.+\n)( (?=\w))?',
        r'## \1'
        r'## \1',
    ),
    (  # Fixup `--date` formatting
        rf'(?m)( --date DATE.+({delim}[^\[]+)*)\[.+({delim}.+)*$',
@@ -61,26 +61,26 @@ def apply_patch(text, patch):
    ),
    (  # Do not split URLs
        rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
        lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n'))
        lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n')),
    ),
    (  # Do not split "words"
        rf'(?m)({delim}\S+)+$',
        lambda mobj: ''.join((delim, mobj.group(0).replace(delim, '')))
        lambda mobj: ''.join((delim, mobj.group(0).replace(delim, ''))),
    ),
    (  # Allow overshooting last line
        rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
        lambda mobj: (mobj.group().replace(delim, ' ')
                      if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
                      else mobj.group())
                      else mobj.group()),
    ),
    (  # Avoid newline when a space is available b/w switch and description
        DISABLE_PATCH,  # This creates issues with prepare_manpage
        r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
        r'\1 '
        r'\1 ',
    ),
    (  # Replace brackets with a Markdown link
        r'SponsorBlock API \((http.+)\)',
        r'[SponsorBlock API](\1)'
        r'[SponsorBlock API](\1)',
    ),
)
@@ -10,10 +10,21 @@
from devscripts.utils import get_filename_args, write_file
from yt_dlp.extractor import list_extractor_classes

TEMPLATE = '''\
# Supported sites

Below is a list of all extractors that are currently included with yt-dlp.
If a site is not listed here, it might still be supported by yt-dlp's embed extraction or generic extractor.
Not all sites listed here are guaranteed to work; websites are constantly changing and sometimes this breaks yt-dlp's support for them.
The only reliable way to check if a site is supported is to try it.

{ie_list}
'''


def main():
    out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
    write_file(get_filename_args(), f'# Supported sites\n{out}\n')
    write_file(get_filename_args(), TEMPLATE.format(ie_list=out))


if __name__ == '__main__':
@@ -16,7 +16,19 @@
def parse_args():
    parser = argparse.ArgumentParser(description='Run selected yt-dlp tests')
    parser.add_argument(
        'test', help='a extractor tests, or one of "core" or "download"', nargs='*')
        'test', help='an extractor test, test path, or one of "core" or "download"', nargs='*')
    parser.add_argument(
        '--flaky',
        action='store_true',
        default=None,
        help='Allow running flaky tests. (default: run, unless in CI)',
    )
    parser.add_argument(
        '--no-flaky',
        action='store_false',
        dest='flaky',
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        '-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION')
    parser.add_argument(
@@ -24,10 +36,11 @@ def parse_args():
    return parser.parse_args()


def run_tests(*tests, pattern=None, ci=False):
    run_core = 'core' in tests or (not pattern and not tests)
def run_tests(*tests, pattern=None, ci=False, flaky: bool | None = None):
    # XXX: hatch uses `tests` if no arguments are passed
    run_core = 'core' in tests or 'tests' in tests or (not pattern and not tests)
    run_download = 'download' in tests
    tests = list(map(fix_test_name, tests))
    run_flaky = flaky or (flaky is None and not ci)

    pytest_args = args.pytest_args or os.getenv('HATCH_TEST_ARGS', '')
    arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)]
@@ -41,7 +54,11 @@ def run_tests(*tests, pattern=None, ci=False):
        arguments.extend(['-m', 'download'])
    else:
        arguments.extend(
            f'test/test_download.py::TestDownload::test_{test}' for test in tests)
            test if '/' in test
            else f'test/test_download.py::TestDownload::test_{fix_test_name(test)}'
            for test in tests)
    if not run_flaky:
        arguments.append('--disallow-flaky')

    print(f'Running {arguments}', flush=True)
    try:
@@ -70,6 +87,11 @@ def run_tests(*tests, pattern=None, ci=False):
        args = parse_args()

        os.chdir(Path(__file__).parent.parent)
        sys.exit(run_tests(*args.test, pattern=args.k, ci=bool(os.getenv('CI'))))
        sys.exit(run_tests(
            *args.test,
            pattern=args.k,
            ci=bool(os.getenv('CI')),
            flaky=args.flaky,
        ))
    except KeyboardInterrupt:
        pass
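Examples of the expanded selection syntax (test names here are illustrative; `--flaky` defaults to on locally and off in CI):

    python3 devscripts/run_tests.py core
    python3 devscripts/run_tests.py test/test_utils.py --no-flaky
    python3 devscripts/run_tests.py Youtube   # expands to test/test_download.py::TestDownload::test_Youtube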
@@ -30,7 +30,7 @@ def property_setter(name, value):
opts = parse_options()
transform = compose_functions(
    property_setter('VARIANT', opts.variant),
    property_setter('UPDATE_HINT', opts.update_message)
    property_setter('UPDATE_HINT', opts.update_message),
)

write_file(VERSION_FILE, transform(read_file(VERSION_FILE)))

157 devscripts/setup_variables.py (new file)
@@ -0,0 +1,157 @@
# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.utils import calculate_version


STABLE_REPOSITORY = 'yt-dlp/yt-dlp'


def setup_variables(environment):
    """
    `environment` must contain these keys:
        REPOSITORY, INPUTS, PROCESSED,
        PUSH_VERSION_COMMIT, PYPI_PROJECT,
        SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
        TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
        SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
        HAS_SOURCE_ARCHIVE_REPO_TOKEN,
        HAS_TARGET_ARCHIVE_REPO_TOKEN,
        HAS_ARCHIVE_REPO_TOKEN

    `INPUTS` must contain these keys:
        prerelease

    `PROCESSED` must contain these keys:
        source_repo, source_tag,
        target_repo, target_tag
    """
    REPOSITORY = environment['REPOSITORY']
    INPUTS = json.loads(environment['INPUTS'])
    PROCESSED = json.loads(environment['PROCESSED'])

    source_channel = None
    does_not_have_needed_token = False
    target_repo_token = None
    pypi_project = None
    pypi_suffix = None

    source_repo = PROCESSED['source_repo']
    source_tag = PROCESSED['source_tag']
    if source_repo == 'stable':
        source_repo = STABLE_REPOSITORY
    if not source_repo:
        source_repo = REPOSITORY
    elif environment['SOURCE_ARCHIVE_REPO']:
        source_channel = environment['SOURCE_ARCHIVE_REPO']
    elif not source_tag and '/' not in source_repo:
        source_tag = source_repo
        source_repo = REPOSITORY

    resolved_source = source_repo
    if source_tag:
        resolved_source = f'{resolved_source}@{source_tag}'
    elif source_repo == STABLE_REPOSITORY:
        resolved_source = 'stable'

    revision = None
    if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
        revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')

    version = calculate_version(INPUTS.get('version') or revision)

    target_repo = PROCESSED['target_repo']
    target_tag = PROCESSED['target_tag']
    if target_repo:
        if target_repo == 'stable':
            target_repo = STABLE_REPOSITORY
        if not target_tag:
            if target_repo == STABLE_REPOSITORY:
                target_tag = version
            elif environment['TARGET_ARCHIVE_REPO']:
                target_tag = source_tag or version
            else:
                target_tag = target_repo
                target_repo = REPOSITORY
        if target_repo != REPOSITORY:
            target_repo = environment['TARGET_ARCHIVE_REPO']
            target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['TARGET_PYPI_PROJECT'] or None
            pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
    else:
        target_tag = source_tag or version
        if source_channel:
            target_repo = source_channel
            target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
            pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
        else:
            target_repo = REPOSITORY

    if does_not_have_needed_token:
        if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
            print(f'::error::Repository access secret {target_repo_token} not found')
            return None
        target_repo_token = 'ARCHIVE_REPO_TOKEN'

    if target_repo == REPOSITORY and not INPUTS['prerelease']:
        pypi_project = environment['PYPI_PROJECT'] or None

    return {
        'channel': resolved_source,
        'version': version,
        'target_repo': target_repo,
        'target_repo_token': target_repo_token,
        'target_tag': target_tag,
        'pypi_project': pypi_project,
        'pypi_suffix': pypi_suffix,
    }


def process_inputs(inputs):
    outputs = {}
    for key in ('source', 'target'):
        repo, _, tag = inputs.get(key, '').partition('@')
        outputs[f'{key}_repo'] = repo
        outputs[f'{key}_tag'] = tag
    return outputs


if __name__ == '__main__':
    if not os.getenv('GITHUB_OUTPUT'):
        print('This script is only intended for use with GitHub Actions', file=sys.stderr)
        sys.exit(1)

    if 'process_inputs' in sys.argv:
        inputs = json.loads(os.environ['INPUTS'])
        print('::group::Inputs')
        print(json.dumps(inputs, indent=2))
        print('::endgroup::')
        outputs = process_inputs(inputs)
        print('::group::Processed')
        print(json.dumps(outputs, indent=2))
        print('::endgroup::')
        with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
            f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
        sys.exit(0)

    outputs = setup_variables(dict(os.environ))
    if not outputs:
        sys.exit(1)

    print('::group::Output variables')
    print(json.dumps(outputs, indent=2))
    print('::endgroup::')

    with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
        f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
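For reference, `setup_variables` can be exercised directly. The environment below is a hypothetical fork releasing a stable build with no optional variables or secrets set; the key names follow the docstring above, but the values are assumptions for the example:

# Hypothetical direct call to setup_variables (run from the repository root)
import json

from devscripts.setup_variables import setup_variables

env = {
    'REPOSITORY': 'fork/yt-dlp',
    'INPUTS': json.dumps({'prerelease': False}),
    'PROCESSED': json.dumps({
        'source_repo': '', 'source_tag': '',
        'target_repo': '', 'target_tag': '',
    }),
    'PUSH_VERSION_COMMIT': '',
    'PYPI_PROJECT': '',
    'SOURCE_PYPI_PROJECT': '', 'SOURCE_PYPI_SUFFIX': '',
    'TARGET_PYPI_PROJECT': '', 'TARGET_PYPI_SUFFIX': '',
    'SOURCE_ARCHIVE_REPO': '', 'TARGET_ARCHIVE_REPO': '',
    'HAS_SOURCE_ARCHIVE_REPO_TOKEN': 'false',
    'HAS_TARGET_ARCHIVE_REPO_TOKEN': 'false',
    'HAS_ARCHIVE_REPO_TOKEN': 'false',
}
outputs = setup_variables(env)
# e.g. {'channel': 'fork/yt-dlp', 'version': '<date>.<revision>', 'target_repo': 'fork/yt-dlp', ...}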
324 devscripts/setup_variables_tests.py (new file)
@@ -0,0 +1,324 @@
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
from devscripts.utils import calculate_version


def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
    inp = inputs.copy()
    inp.setdefault('linux_armv7l', True)
    inp.setdefault('prerelease', False)
    processed = process_inputs(inp)
    source_repo = processed['source_repo'].upper()
    target_repo = processed['target_repo'].upper()
    variables = {k.upper(): v for k, v in repo_vars.items()}
    secrets = {k.upper(): v for k, v in repo_secrets.items()}

    env = {
        # Keep this in sync with prepare.setup_variables in release.yml
        'INPUTS': json.dumps(inp),
        'PROCESSED': json.dumps(processed),
        'REPOSITORY': github_repository,
        'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
        'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
        'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
        'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
        'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
        'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
        'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
        'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
        'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
        'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
    }

    result = setup_variables(env)
    if not expected:
        print(' {\n' + '\n'.join(f'    {k!r}: {v!r},' for k, v in result.items()) + '\n }')
        return

    exp = expected.copy()
    if ignore_revision:
        assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
        version_is_tag = result['version'] == result['target_tag']
        for dct in (result, exp):
            dct['version'] = '.'.join(dct['version'].split('.')[:3])
            if version_is_tag:
                dct['target_tag'] = dct['version']
    assert result == exp, f'unexpected result: {github_repository} {note}'


def test_setup_variables():
    DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
    DEFAULT_VERSION = calculate_version()
    BASE_REPO_VARS = {
        'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
        'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
        'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
        'NIGHTLY_PYPI_SUFFIX': 'dev',
        'PUSH_VERSION_COMMIT': '1',
        'PYPI_PROJECT': 'yt-dlp',
    }
    BASE_REPO_SECRETS = {
        'ARCHIVE_REPO_TOKEN': '1',
    }
    FORK_REPOSITORY = 'fork/yt-dlp'
    FORK_ORG = FORK_REPOSITORY.partition('/')[0]

    _test(
        STABLE_REPOSITORY, 'official vars/secrets, stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
            'channel': 'stable',
            'version': DEFAULT_VERSION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': None,
        })
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, nightly',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'nightly',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': 'nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, master',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'source': 'master',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': 'master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': 'yt-dlp/yt-dlp-master-builds',
            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': f'{STABLE_REPOSITORY}@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
        BASE_REPO_VARS, BASE_REPO_SECRETS, {
            'target': 'stable@experimental',
            'prerelease': True,
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': STABLE_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
        {}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
        {}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
        {}, {}, {
            'prerelease': True,
            'source': 'nightly',
            'target': 'nightly',
        }, {
            'channel': f'{FORK_REPOSITORY}@nightly',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'nightly',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, master',
        {}, {}, {
            'prerelease': True,
            'source': 'master',
            'target': 'master',
        }, {
            'channel': f'{FORK_REPOSITORY}@master',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'master',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
        {}, {}, {'version': '123'}, {
            'channel': FORK_REPOSITORY,
            'version': f'{DEFAULT_VERSION[:10]}.123',
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': f'{DEFAULT_VERSION[:10]}.123',
            'pypi_project': None,
            'pypi_suffix': None,
        })

    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION,
            'pypi_project': None,
            'pypi_suffix': None,
        })
    _test(
        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
        {'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
            'channel': FORK_REPOSITORY,
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
            'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'PYPI_PROJECT': 'yt-dlp-test',
        }, {
            'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target': 'nightly',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
            'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
            'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
            'MASTER_PYPI_PROJECT': 'yt-dlp-test',
            'MASTER_PYPI_SUFFIX': 'dev',
        }, {
            'MASTER_ARCHIVE_REPO_TOKEN': '1',
        }, {
            'source': f'{FORK_ORG}/yt-dlp-master-builds',
            'target': 'master',
            'prerelease': True,
        }, {
            'channel': f'{FORK_ORG}/yt-dlp-master-builds',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
            'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
            'target_tag': DEFAULT_VERSION_WITH_REVISION,
            'pypi_project': 'yt-dlp-test',
            'pypi_suffix': 'dev',
        }, ignore_revision=True)

    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag',
        {}, {}, {'source': 'experimental'}, {
            'channel': f'{FORK_REPOSITORY}@experimental',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
    _test(
        FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
        {}, {}, {
            'prerelease': True,
            'source': 'stable',
            'target': 'experimental',
        }, {
            'channel': 'stable',
            'version': DEFAULT_VERSION_WITH_REVISION,
            'target_repo': FORK_REPOSITORY,
            'target_repo_token': None,
            'target_tag': 'experimental',
            'pypi_project': None,
            'pypi_suffix': None,
        }, ignore_revision=True)
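Since the tests are plain asserting functions, they can also be invoked without a test runner; a hedged sketch (assumes the repository root as the working directory):

# Invoke the release-variable tests directly (run from the repository root)
from devscripts.setup_variables_tests import test_setup_variables

test_setup_variables()  # raises AssertionError on any unexpected result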
@@ -9,24 +9,9 @@

import argparse
import contextlib
import datetime as dt
import sys

from devscripts.utils import read_version, run_process, write_file


def get_new_version(version, revision):
    if not version:
        version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')

    if revision:
        assert revision.isdecimal(), 'Revision must be a number'
    else:
        old_version = read_version().split('.')
        if version.split('.') == old_version[:3]:
            revision = str(int((old_version + [0])[3]) + 1)

    return f'{version}.{revision}' if revision else version
from devscripts.utils import calculate_version, run_process, write_file


def get_git_head():
@@ -72,9 +57,7 @@ def get_git_head():
args = parser.parse_args()

git_head = get_git_head()
version = (
    args.version if args.version and '.' in args.version
    else get_new_version(None, args.version))
version = calculate_version(args.version)
write_file(args.output, VERSION_TEMPLATE.format(
    version=version, git_head=git_head, channel=args.channel, origin=args.origin,
    package_version=f'{version}{args.suffix}'))

@@ -20,7 +20,9 @@
    '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
    help='path to the Changelog file')
args = parser.parse_args()
new_entry = create_changelog(args)

header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
current_version = read_version()
if current_version != changelog.splitlines()[0]:
    new_entry = create_changelog(args)
    write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')

166 devscripts/update_ejs.py (new executable file)
@@ -0,0 +1,166 @@
#!/usr/bin/env python3
from __future__ import annotations

import contextlib
import io
import json
import hashlib
import pathlib
import urllib.request
import zipfile


TEMPLATE = '''\
# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY!

VERSION = {version!r}
HASHES = {{
{hash_mapping}
}}
'''
PREFIX = '    "yt-dlp-ejs=='
BASE_PATH = pathlib.Path(__file__).parent.parent
PYPROJECT_PATH = BASE_PATH / 'pyproject.toml'
PACKAGE_PATH = BASE_PATH / 'yt_dlp/extractor/youtube/jsc/_builtin/vendor'
RELEASE_URL = 'https://api.github.com/repos/yt-dlp/ejs/releases/latest'
ASSETS = {
    'yt.solver.lib.js': False,
    'yt.solver.lib.min.js': False,
    'yt.solver.deno.lib.js': True,
    'yt.solver.bun.lib.js': True,
    'yt.solver.core.min.js': False,
    'yt.solver.core.js': True,
}
MAKEFILE_PATH = BASE_PATH / 'Makefile'


def request(url: str):
    return contextlib.closing(urllib.request.urlopen(url))


def makefile_variables(
    version: str | None = None,
    name: str | None = None,
    digest: str | None = None,
    data: bytes | None = None,
    keys_only: bool = False,
) -> dict[str, str | None]:
    assert keys_only or all(arg is not None for arg in (version, name, digest, data))

    return {
        'EJS_VERSION': None if keys_only else version,
        'EJS_WHEEL_NAME': None if keys_only else name,
        'EJS_WHEEL_HASH': None if keys_only else digest,
        'EJS_PY_FOLDERS': None if keys_only else list_wheel_contents(data, 'py', files=False),
        'EJS_PY_FILES': None if keys_only else list_wheel_contents(data, 'py', folders=False),
        'EJS_JS_FOLDERS': None if keys_only else list_wheel_contents(data, 'js', files=False),
        'EJS_JS_FILES': None if keys_only else list_wheel_contents(data, 'js', folders=False),
    }


def list_wheel_contents(
    wheel_data: bytes,
    suffix: str | None = None,
    folders: bool = True,
    files: bool = True,
) -> str:
    assert folders or files, 'at least one of "folders" or "files" must be True'

    with zipfile.ZipFile(io.BytesIO(wheel_data)) as zipf:
        path_gen = (zinfo.filename for zinfo in zipf.infolist())

        filtered = filter(lambda path: path.startswith('yt_dlp_ejs/'), path_gen)
        if suffix:
            filtered = filter(lambda path: path.endswith(f'.{suffix}'), filtered)

        files_list = list(filtered)
    if not folders:
        return ' '.join(files_list)

    folders_list = list(dict.fromkeys(path.rpartition('/')[0] for path in files_list))
    if not files:
        return ' '.join(folders_list)

    return ' '.join(folders_list + files_list)


def main():
    current_version = None
    with PYPROJECT_PATH.open() as file:
        for line in file:
            if not line.startswith(PREFIX):
                continue
            current_version, _, _ = line.removeprefix(PREFIX).partition('"')

    if not current_version:
        print('yt-dlp-ejs dependency line could not be found')
        return

    makefile_info = makefile_variables(keys_only=True)
    prefixes = tuple(f'{key} = ' for key in makefile_info)
    with MAKEFILE_PATH.open() as file:
        for line in file:
            if not line.startswith(prefixes):
                continue
            key, _, val = line.partition(' = ')
            makefile_info[key] = val.rstrip()

    with request(RELEASE_URL) as resp:
        info = json.load(resp)

    version = info['tag_name']
    if version == current_version:
        print(f'yt-dlp-ejs is up to date! ({version})')
        return

    print(f'Updating yt-dlp-ejs from {current_version} to {version}')
    hashes = []
    wheel_info = {}
    for asset in info['assets']:
        name = asset['name']
        is_wheel = name.startswith('yt_dlp_ejs-') and name.endswith('.whl')
        if not is_wheel and name not in ASSETS:
            continue
        with request(asset['browser_download_url']) as resp:
            data = resp.read()

        # verify digest from github
        digest = asset['digest']
        algo, _, expected = digest.partition(':')
        hexdigest = hashlib.new(algo, data).hexdigest()
        assert hexdigest == expected, f'downloaded attest mismatch ({hexdigest!r} != {expected!r})'

        if is_wheel:
            wheel_info = makefile_variables(version, name, digest, data)
            continue

        # calculate sha3-512 digest
        asset_hash = hashlib.sha3_512(data).hexdigest()
        hashes.append(f'    {name!r}: {asset_hash!r},')

        if ASSETS[name]:
            (PACKAGE_PATH / name).write_bytes(data)

    hash_mapping = '\n'.join(hashes)
    for asset_name in ASSETS:
        assert asset_name in hash_mapping, f'{asset_name} not found in release'

    assert all(wheel_info.get(key) for key in makefile_info), 'wheel info not found in release'

    (PACKAGE_PATH / '_info.py').write_text(TEMPLATE.format(
        version=version,
        hash_mapping=hash_mapping,
    ))

    content = PYPROJECT_PATH.read_text()
    updated = content.replace(PREFIX + current_version, PREFIX + version)
    PYPROJECT_PATH.write_text(updated)

    makefile = MAKEFILE_PATH.read_text()
    for key in wheel_info:
        makefile = makefile.replace(f'{key} = {makefile_info[key]}', f'{key} = {wheel_info[key]}')
    MAKEFILE_PATH.write_text(makefile)


if __name__ == '__main__':
    main()

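For orientation, the `_info.py` module rendered from `TEMPLATE` ends up looking roughly like the sketch below; the version and digests are placeholders, not real release data:

# Hypothetical contents of the generated vendor/_info.py (placeholder values)
# This file is generated by devscripts/update_ejs.py. DO NOT MODIFY!

VERSION = '0.4.0'
HASHES = {
    'yt.solver.lib.js': '<sha3-512 hexdigest>',
    'yt.solver.core.js': '<sha3-512 hexdigest>',
}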
@@ -1,5 +1,7 @@
import argparse
import datetime as dt
import functools
import re
import subprocess


@@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
    return items[varname]


def calculate_version(version=None, fname='yt_dlp/version.py'):
    if version and '.' in version:
        return version

    revision = version
    version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')

    if revision:
        assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
    else:
        old_version = read_version(fname=fname).split('.')
        if version.split('.') == old_version[:3]:
            revision = str(int(([*old_version, 0])[3]) + 1)

    return f'{version}.{revision}' if revision else version


def get_filename_args(has_infile=False, default_outfile=None):
    parser = argparse.ArgumentParser()
    if has_infile:

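Worked examples for the consolidated `calculate_version` helper (the dates and the contents of `yt_dlp/version.py` are hypothetical):

# Illustrative results for devscripts.utils.calculate_version,
# assuming today is 2025-01-02 UTC and yt_dlp/version.py holds '2025.01.02.1'
from devscripts.utils import calculate_version

calculate_version('2024.05.26')  # contains '.': returned unchanged
calculate_version('123')         # numeric revision -> '2025.01.02.123'
calculate_version()              # same-day re-release -> '2025.01.02.2'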
@@ -9,15 +9,16 @@

import yt_dlp

ZSH_COMPLETION_FILE = "completions/zsh/_yt-dlp"
ZSH_COMPLETION_TEMPLATE = "devscripts/zsh-completion.in"
ZSH_COMPLETION_FILE = 'completions/zsh/_yt-dlp'
ZSH_COMPLETION_TEMPLATE = 'devscripts/zsh-completion.in'


def build_completion(opt_parser):
    opts = [opt for group in opt_parser.option_groups
            for opt in group.option_list]
    opts_file = [opt for opt in opts if opt.metavar == "FILE"]
    opts_dir = [opt for opt in opts if opt.metavar == "DIR"]
    opts_file = [opt for opt in opts if opt.metavar == 'FILE']
    opts_dir = [opt for opt in opts if opt.metavar == 'DIR']
    opts_path = [opt for opt in opts if opt.metavar == 'PATH']

    fileopts = []
    for opt in opts_file:
@@ -26,6 +27,12 @@ def build_completion(opt_parser):
        if opt._long_opts:
            fileopts.extend(opt._long_opts)

    for opt in opts_path:
        if opt._short_opts:
            fileopts.extend(opt._short_opts)
        if opt._long_opts:
            fileopts.extend(opt._long_opts)

    diropts = []
    for opt in opts_dir:
        if opt._short_opts:
@@ -38,11 +45,11 @@ def build_completion(opt_parser):
    with open(ZSH_COMPLETION_TEMPLATE) as f:
        template = f.read()

    template = template.replace("{{fileopts}}", "|".join(fileopts))
    template = template.replace("{{diropts}}", "|".join(diropts))
    template = template.replace("{{flags}}", " ".join(flags))
    template = template.replace('{{fileopts}}', '|'.join(fileopts))
    template = template.replace('{{diropts}}', '|'.join(diropts))
    template = template.replace('{{flags}}', ' '.join(flags))

    with open(ZSH_COMPLETION_FILE, "w") as f:
    with open(ZSH_COMPLETION_FILE, 'w') as f:
        f.write(template)

221 pyproject.toml
@@ -1,68 +1,75 @@
[build-system]
requires = ["hatchling"]
requires = ["hatchling>=1.27.0"]
build-backend = "hatchling.build"

[project]
name = "yt-dlp"
maintainers = [
authors = [
    {name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
]
maintainers = [
    {email = "maintainers@yt-dlp.org"},
    {name = "Grub4K", email = "contact@grub4k.xyz"},
    {name = "bashonly", email = "bashonly@protonmail.com"},
    {name = "coletdjnz", email = "coletdjnz@protonmail.com"},
    {name = "sepro", email = "sepro@sepr0.com"},
]
description = "A feature-rich command-line audio/video downloader"
readme = "README.md"
requires-python = ">=3.8"
requires-python = ">=3.10"
keywords = [
    "cli",
    "downloader",
    "youtube-dl",
    "video-downloader",
    "youtube-downloader",
    "sponsorblock",
    "youtube-dlc",
    "yt-dlp",
]
license = {file = "LICENSE"}
license = "Unlicense"
license-files = ["LICENSE"]
classifiers = [
    "Topic :: Multimedia :: Video",
    "Development Status :: 5 - Production/Stable",
    "Environment :: Console",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3.14",
    "Programming Language :: Python :: Implementation",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "License :: OSI Approved :: The Unlicense (Unlicense)",
    "Operating System :: OS Independent",
]
dynamic = ["version"]
dependencies = [
dependencies = []

[project.optional-dependencies]
default = [
    "brotli; implementation_name=='cpython'",
    "brotlicffi; implementation_name!='cpython'",
    "certifi",
    "mutagen",
    "pycryptodomex",
    "requests>=2.31.0,<3",
    "urllib3>=1.26.17,<3",
    "websockets>=12.0",
    "requests>=2.32.2,<3",
    "urllib3>=2.0.2,<3",
    "websockets>=13.0",
    "yt-dlp-ejs==0.3.1",
]
curl-cffi = [
    "curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.14; implementation_name=='cpython'",
]

[project.optional-dependencies]
default = []
curl-cffi = ["curl-cffi==0.5.10; implementation_name=='cpython'"]
secretstorage = [
    "cffi",
    "secretstorage",
]
build = [
    "build",
    "hatchling",
    "hatchling>=1.27.0",
    "pip",
    "setuptools>=66.1.0,<70",
    "setuptools>=71.0.2,<81",  # See https://github.com/pyinstaller/pyinstaller/issues/9149
    "wheel",
]
dev = [
@@ -72,25 +79,21 @@ dev = [
]
static-analysis = [
    "autopep8~=2.0",
    "ruff~=0.4.4",
    "ruff~=0.14.0",
]
test = [
    "pytest~=8.1",
    "pytest-rerunfailures~=14.0",
]
pyinstaller = [
    "pyinstaller>=6.3; sys_platform!='darwin'",
    "pyinstaller==5.13.2; sys_platform=='darwin'",  # needed for curl_cffi
]
py2exe = [
    "py2exe>=0.12",
    "requests==2.31.*",
    "pyinstaller>=6.13.0",  # Windows temp cleanup fixed in 6.13.0
]

[project.urls]
Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
Repository = "https://github.com/yt-dlp/yt-dlp"
Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers"

[project.scripts]
yt-dlp = "yt_dlp:main"
@@ -108,7 +111,6 @@ include = [
    "/LICENSE",  # included as license
    "/pyproject.toml",  # included by default
    "/README.md",  # included as readme
    "/setup.cfg",
    "/supportedsites.md",
]
artifacts = [
@@ -121,7 +123,12 @@ artifacts = [

[tool.hatch.build.targets.wheel]
packages = ["yt_dlp"]
artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]
artifacts = [
    "/yt_dlp/extractor/lazy_extractors.py",
]
exclude = [
    "/yt_dlp/**/*.md",
]

[tool.hatch.build.targets.wheel.shared-data]
"completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
@@ -160,7 +167,6 @@ lint-fix = "ruff check --fix {args:.}"
features = ["test"]
dependencies = [
    "pytest-randomly~=3.15",
    "pytest-rerunfailures~=14.0",
    "pytest-xdist[psutil]~=3.5",
]

@@ -170,14 +176,12 @@ run-cov = "echo Code coverage not implemented && exit 1"

[[tool.hatch.envs.hatch-test.matrix]]
python = [
    "3.8",
    "3.9",
    "3.10",
    "3.11",
    "3.12",
    "pypy3.8",
    "pypy3.9",
    "pypy3.10",
    "3.13",
    "3.14",
    "pypy3.11",
]

[tool.ruff]
@@ -185,21 +189,86 @@ line-length = 120

[tool.ruff.lint]
ignore = [
    "E402",  # module level import not at top of file
    "E501",  # line too long
    "E731",  # do not assign a lambda expression, use a def
    "E741",  # ambiguous variable name
    "E402",  # module-import-not-at-top-of-file
    "E501",  # line-too-long
    "E731",  # lambda-assignment
    "E741",  # ambiguous-variable-name
    "UP031",  # printf-string-formatting
    "UP036",  # outdated-version-block
    "B006",  # mutable-argument-default
    "B008",  # function-call-in-default-argument
    "B011",  # assert-false
    "B017",  # assert-raises-exception
    "B023",  # function-uses-loop-variable (false positives)
    "B028",  # no-explicit-stacklevel
    "B904",  # raise-without-from-inside-except
    "A005",  # stdlib-module-shadowing
    "C401",  # unnecessary-generator-set
    "C402",  # unnecessary-generator-dict
    "PIE790",  # unnecessary-placeholder
    "SIM102",  # collapsible-if
    "SIM108",  # if-else-block-instead-of-if-exp
    "SIM112",  # uncapitalized-environment-variables
    "SIM113",  # enumerate-for-loop
    "SIM114",  # if-with-same-arms
    "SIM115",  # open-file-with-context-handler
    "SIM117",  # multiple-with-statements
    "SIM223",  # expr-and-false
    "SIM300",  # yoda-conditions
    "TD001",  # invalid-todo-tag
    "TD002",  # missing-todo-author
    "TD003",  # missing-todo-link
    "PLC0415",  # import-outside-top-level
    "PLE0604",  # invalid-all-object (false positives)
    "PLE0643",  # potential-index-error (false positives)
    "PLW0603",  # global-statement
    "PLW1510",  # subprocess-run-without-check
    "PLW1641",  # eq-without-hash
    "PLW2901",  # redefined-loop-name
    "RUF001",  # ambiguous-unicode-character-string
    "RUF012",  # mutable-class-default
    "RUF100",  # unused-noqa (flake8 has slightly different behavior)
]
select = [
    "E",  # pycodestyle errors
    "W",  # pycodestyle warnings
    "F",  # pyflakes
    "I",  # import order
    "E",  # pycodestyle Error
    "W",  # pycodestyle Warning
    "F",  # Pyflakes
    "I",  # isort
    "Q",  # flake8-quotes
    "N803",  # invalid-argument-name
    "N804",  # invalid-first-argument-name-for-class-method
    "UP",  # pyupgrade
    "B",  # flake8-bugbear
    "A",  # flake8-builtins
    "COM",  # flake8-commas
    "C4",  # flake8-comprehensions
    "FA",  # flake8-future-annotations
    "ISC",  # flake8-implicit-str-concat
    "ICN003",  # banned-import-from
    "PIE",  # flake8-pie
    "T20",  # flake8-print
    "RSE",  # flake8-raise
    "RET504",  # unnecessary-assign
    "SIM",  # flake8-simplify
    "TID251",  # banned-api
    "TD",  # flake8-todos
    "PLC",  # Pylint Convention
    "PLE",  # Pylint Error
    "PLW",  # Pylint Warning
    "RUF",  # Ruff-specific rules
]

[tool.ruff.lint.per-file-ignores]
"devscripts/lazy_load_template.py" = ["F401"]
"!yt_dlp/extractor/**.py" = ["I"]
"devscripts/lazy_load_template.py" = [
    "F401",  # unused-import
]
"!yt_dlp/extractor/**.py" = [
    "I",  # isort
    "ICN003",  # banned-import-from
    "T20",  # flake8-print
    "A002",  # builtin-argument-shadowing
    "C408",  # unnecessary-collection-call
]

[tool.ruff.lint.isort]
known-first-party = [
@@ -209,6 +278,61 @@ known-first-party = [
]
relative-imports-order = "closest-to-furthest"

[tool.ruff.lint.flake8-quotes]
docstring-quotes = "double"
multiline-quotes = "single"
inline-quotes = "single"
avoid-escape = false

[tool.ruff.lint.pep8-naming]
classmethod-decorators = [
    "yt_dlp.utils.classproperty",
]

[tool.ruff.lint.flake8-import-conventions]
banned-from = [
    "base64",
    "datetime",
    "functools",
    "glob",
    "hashlib",
    "itertools",
    "json",
    "math",
    "os",
    "pathlib",
    "random",
    "re",
    "string",
    "sys",
    "time",
    "urllib.parse",
    "uuid",
    "xml",
]

[tool.ruff.lint.flake8-tidy-imports.banned-api]
"yt_dlp.compat.compat_str".msg = "Use `str` instead."
"yt_dlp.compat.compat_b64decode".msg = "Use `base64.b64decode` instead."
"yt_dlp.compat.compat_urlparse".msg = "Use `urllib.parse` instead."
"yt_dlp.compat.compat_parse_qs".msg = "Use `urllib.parse.parse_qs` instead."
"yt_dlp.compat.compat_urllib_parse_unquote".msg = "Use `urllib.parse.unquote` instead."
"yt_dlp.compat.compat_urllib_parse_urlencode".msg = "Use `urllib.parse.urlencode` instead."
"yt_dlp.compat.compat_urllib_parse_urlparse".msg = "Use `urllib.parse.urlparse` instead."
"yt_dlp.compat.compat_shlex_quote".msg = "Use `yt_dlp.utils.shell_quote` instead."
"yt_dlp.utils.error_to_compat_str".msg = "Use `str` instead."
"yt_dlp.utils.bytes_to_intlist".msg = "Use `list` instead."
"yt_dlp.utils.intlist_to_bytes".msg = "Use `bytes` instead."
"yt_dlp.utils.jwt_encode_hs256".msg = "Use `yt_dlp.utils.jwt_encode` instead."
"yt_dlp.utils.decodeArgument".msg = "Do not use"
"yt_dlp.utils.decodeFilename".msg = "Do not use"
"yt_dlp.utils.encodeFilename".msg = "Do not use"
"yt_dlp.compat.compat_os_name".msg = "Use `os.name` instead."
"yt_dlp.compat.compat_realpath".msg = "Use `os.path.realpath` instead."
"yt_dlp.compat.functools".msg = "Use `functools` instead."
"yt_dlp.utils.decodeOption".msg = "Do not use"
"yt_dlp.utils.compiled_regex_type".msg = "Use `re.Pattern` instead."

[tool.autopep8]
max_line_length = 120
recursive = true
@@ -270,9 +394,14 @@ select = [
    "W391",
    "W504",
]
exclude = "*/extractor/lazy_extractors.py,*venv*,*/test/testdata/sigs/player-*.js,.idea,.vscode"

[tool.pytest.ini_options]
addopts = "-ra -v --strict-markers"
addopts = [
    "-ra",  # summary: all except passed
    "--verbose",
    "--strict-markers",
]
markers = [
    "download",
]

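The dependency groups defined above can be confirmed from the file itself with the standard library; a small sketch (requires Python 3.11+ for `tomllib`, and assumes a repository checkout as the working directory):

# List the dependency groups defined above (tomllib requires Python 3.11+)
import tomllib

with open('pyproject.toml', 'rb') as f:
    project = tomllib.load(f)['project']

print(project['requires-python'])                # '>=3.10'
print(sorted(project['optional-dependencies']))  # ['build', 'curl-cffi', 'default', ...]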
39 setup.cfg (removed)
@@ -1,39 +0,0 @@
[flake8]
exclude = build,venv,.tox,.git,.pytest_cache
ignore = E402,E501,E731,E741,W503
max_line_length = 120
per_file_ignores =
    devscripts/lazy_load_template.py: F401


[autoflake]
ignore-init-module-imports = true
ignore-pass-after-docstring = true
remove-all-unused-imports = true
remove-duplicate-keys = true
remove-unused-variables = true


[tox:tox]
skipsdist = true
envlist = py{38,39,310,311,312},pypy{38,39,310}
skip_missing_interpreters = true

[testenv]  # tox
deps =
    pytest
commands = pytest {posargs:"-m not download"}
passenv = HOME  # For test_compat_expanduser
setenv =
    # PYTHONWARNINGS = error  # Catches PIP's warnings too


[isort]
py_version = 38
multi_line_output = VERTICAL_HANGING_INDENT
line_length = 80
reverse_relative = true
ensure_newline_before_comments = true
include_trailing_comma = true
known_first_party =
    test
(File diff suppressed because it is too large)
@@ -22,8 +22,8 @@ def handler(request):
    class HandlerWrapper(handler):
        RH_KEY = handler.RH_KEY

        def __init__(self, *args, **kwargs):
            super().__init__(logger=FakeLogger, *args, **kwargs)
        def __init__(self, **kwargs):
            super().__init__(logger=FakeLogger, **kwargs)

    return HandlerWrapper

@@ -52,13 +52,43 @@ def skip_handlers_if(request, handler):
        pytest.skip(marker.args[1] if len(marker.args) > 1 else '')


@pytest.fixture(autouse=True)
def handler_flaky(request, handler):
    """Mark a certain handler as being flaky.
    This will skip the test if pytest is run with `--disallow-flaky`

    usage:
        pytest.mark.handler_flaky('my_handler', os.name != 'nt', reason='reason')
    """
    for marker in request.node.iter_markers(handler_flaky.__name__):
        if (
            marker.args[0] == handler.RH_KEY
            and (not marker.args[1:] or any(marker.args[1:]))
            and request.config.getoption('disallow_flaky')
        ):
            reason = marker.kwargs.get('reason')
            pytest.skip(f'flaky: {reason}' if reason else 'flaky')


def pytest_addoption(parser, pluginmanager):
    parser.addoption(
        '--disallow-flaky',
        action='store_true',
        help='disallow flaky tests from running.',
    )


def pytest_configure(config):
    config.addinivalue_line(
        "markers", "skip_handler(handler): skip test for the given handler",
        'markers', 'skip_handler(handler): skip test for the given handler',
    )
    config.addinivalue_line(
        "markers", "skip_handler_if(handler): skip test for the given handler if condition is true"
        'markers', 'skip_handler_if(handler): skip test for the given handler if condition is true',
    )
    config.addinivalue_line(
        "markers", "skip_handlers_if(handler): skip test for handlers when the condition is true"
        'markers', 'skip_handlers_if(handler): skip test for handlers when the condition is true',
    )
    config.addinivalue_line(
        'markers', 'handler_flaky(handler): mark handler as flaky if condition is true',
    )

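A hedged sketch of how a request-handler test would opt into the new marker; the handler key, condition, and reason are illustrative assumptions:

# Illustrative use of the new handler_flaky marker in a request-handler test
import os

import pytest


@pytest.mark.handler_flaky('Websockets', os.name == 'nt', reason='timeouts on Windows CI')
def test_keepalive(handler):
    ...  # skipped when pytest runs with --disallow-flaky (as run_tests.py passes in CI)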
223 test/helper.py
@@ -9,15 +9,14 @@

import yt_dlp.extractor
from yt_dlp import YoutubeDL
from yt_dlp.compat import compat_os_name
from yt_dlp.utils import preferredencoding, try_call, write_string, find_available_port

if 'pytest' in sys.modules:
    import pytest
    is_download_test = pytest.mark.download
else:
    def is_download_test(testClass):
        return testClass
    def is_download_test(test_class):
        return test_class


def get_params(override=None):
@@ -45,11 +44,11 @@ def try_rm(filename):


def report_warning(message, *args, **kwargs):
    '''
    """
    Print the message to stderr, it will be prefixed with 'WARNING:'
    If stderr is a tty file the 'WARNING:' will be colored
    '''
    if sys.stderr.isatty() and compat_os_name != 'nt':
    """
    if sys.stderr.isatty() and os.name != 'nt':
        _msg_header = '\033[0;33mWARNING:\033[0m'
    else:
        _msg_header = 'WARNING:'
@@ -102,88 +101,109 @@ def getwebpagetestcases():
md5 = lambda s: hashlib.md5(s.encode()).hexdigest()


def expect_value(self, got, expected, field):
    if isinstance(expected, str) and expected.startswith('re:'):
        match_str = expected[len('re:'):]
        match_rex = re.compile(match_str)
def _iter_differences(got, expected, field):
    if isinstance(expected, str):
        op, _, val = expected.partition(':')
        if op in ('mincount', 'maxcount', 'count'):
            if not isinstance(got, (list, dict)):
                yield field, f'expected either {list.__name__} or {dict.__name__}, got {type(got).__name__}'
                return

        self.assertTrue(
            isinstance(got, str),
            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
        self.assertTrue(
            match_rex.match(got),
            f'field {field} (value: {got!r}) should match {match_str!r}')
    elif isinstance(expected, str) and expected.startswith('startswith:'):
        start_str = expected[len('startswith:'):]
        self.assertTrue(
            isinstance(got, str),
            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
        self.assertTrue(
            got.startswith(start_str),
            f'field {field} (value: {got!r}) should start with {start_str!r}')
    elif isinstance(expected, str) and expected.startswith('contains:'):
        contains_str = expected[len('contains:'):]
        self.assertTrue(
            isinstance(got, str),
            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
        self.assertTrue(
            contains_str in got,
            f'field {field} (value: {got!r}) should contain {contains_str!r}')
    elif isinstance(expected, type):
        self.assertTrue(
            isinstance(got, expected),
            f'Expected type {expected!r} for field {field}, but got value {got!r} of type {type(got)!r}')
    elif isinstance(expected, dict) and isinstance(got, dict):
        expect_dict(self, got, expected)
    elif isinstance(expected, list) and isinstance(got, list):
        self.assertEqual(
            len(expected), len(got),
            'Expect a list of length %d, but got a list of length %d for field %s' % (
                len(expected), len(got), field))
        for index, (item_got, item_expected) in enumerate(zip(got, expected)):
            type_got = type(item_got)
            type_expected = type(item_expected)
            self.assertEqual(
                type_expected, type_got,
                'Type mismatch for list item at index %d for field %s, expected %r, got %r' % (
                    index, field, type_expected, type_got))
            expect_value(self, item_got, item_expected, field)
    else:
        if isinstance(expected, str) and expected.startswith('md5:'):
            self.assertTrue(
                isinstance(got, str),
                f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
            got = 'md5:' + md5(got)
        elif isinstance(expected, str) and re.match(r'^(?:min|max)?count:\d+', expected):
            self.assertTrue(
                isinstance(got, (list, dict)),
                f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
            op, _, expected_num = expected.partition(':')
            expected_num = int(expected_num)
            expected_num = int(val)
            got_num = len(got)
            if op == 'mincount':
                assert_func = assertGreaterEqual
                msg_tmpl = 'Expected %d items in field %s, but only got %d'
            elif op == 'maxcount':
                assert_func = assertLessEqual
                msg_tmpl = 'Expected maximum %d items in field %s, but got %d'
            elif op == 'count':
                assert_func = assertEqual
                msg_tmpl = 'Expected exactly %d items in field %s, but got %d'
            else:
                assert False
            assert_func(
                self, len(got), expected_num,
                msg_tmpl % (expected_num, field, len(got)))
                if got_num < expected_num:
                    yield field, f'expected at least {val} items, got {got_num}'
                return

            if op == 'maxcount':
                if got_num > expected_num:
                    yield field, f'expected at most {val} items, got {got_num}'
                return

            assert op == 'count'
            if got_num != expected_num:
                yield field, f'expected exactly {val} items, got {got_num}'
            return
        self.assertEqual(
            expected, got,
            f'Invalid value for field {field}, expected {expected!r}, got {got!r}')

        if not isinstance(got, str):
            yield field, f'expected {str.__name__}, got {type(got).__name__}'
            return

        if op == 're':
            if not re.match(val, got):
                yield field, f'should match {val!r}, got {got!r}'
            return

        if op == 'startswith':
            if not got.startswith(val):
                yield field, f'should start with {val!r}, got {got!r}'
            return

        if op == 'contains':
            if val not in got:
                yield field, f'should contain {val!r}, got {got!r}'
            return

        if op == 'md5':
            hash_val = md5(got)
            if hash_val != val:
                yield field, f'expected hash {val}, got {hash_val}'
            return

        if got != expected:
            yield field, f'expected {expected!r}, got {got!r}'
            return

    if isinstance(expected, dict) and isinstance(got, dict):
        for key, expected_val in expected.items():
            if key not in got:
                yield field, f'missing key: {key!r}'
                continue

            field_name = key if field is None else f'{field}.{key}'
            yield from _iter_differences(got[key], expected_val, field_name)
        return

    if isinstance(expected, type):
        if not isinstance(got, expected):
            yield field, f'expected {expected.__name__}, got {type(got).__name__}'
        return

    if isinstance(expected, list) and isinstance(got, list):
        # TODO: clever diffing algorithm lmao
        if len(expected) != len(got):
            yield field, f'expected length of {len(expected)}, got {len(got)}'
            return

        for index, (got_val, expected_val) in enumerate(zip(got, expected, strict=True)):
            field_name = str(index) if field is None else f'{field}.{index}'
            yield from _iter_differences(got_val, expected_val, field_name)
        return

    if got != expected:
        yield field, f'expected {expected!r}, got {got!r}'


def _expect_value(message, got, expected, field):
    mismatches = list(_iter_differences(got, expected, field))
    if not mismatches:
        return

    fields = [field for field, _ in mismatches if field is not None]
    return ''.join((
        message, f' ({", ".join(fields)})' if fields else '',
        *(f'\n\t{field}: {message}' for field, message in mismatches)))


def expect_value(self, got, expected, field):
    if message := _expect_value('values differ', got, expected, field):
        self.fail(message)


def expect_dict(self, got_dict, expected_dict):
    for info_field, expected in expected_dict.items():
        got = got_dict.get(info_field)
        expect_value(self, got, expected, info_field)
    if message := _expect_value('dictionaries differ', got_dict, expected_dict, None):
        self.fail(message)

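The rewritten helpers keep the same expectation mini-language, but each `op:` prefix now yields a readable difference instead of asserting immediately. Hypothetical expectation strings, for orientation:

# Hypothetical expectation strings understood by _iter_differences
expected = {
    'title': 're:^Video .+$',                      # regex must match
    'thumbnail': 'startswith:https://',            # prefix check
    'description': 'contains:yt-dlp',              # substring check
    'id': 'md5:0cc175b9c0f1b6a831c399e269772661',  # md5 of the got value
    'tags': 'mincount:3',                          # len(got) >= 3; also maxcount:/count:
}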
|
||||
def sanitize_got_info_dict(got_dict):
|
||||
@@ -224,7 +244,7 @@ def sanitize(key, value):
|
||||
test_info_dict.pop('display_id')
|
||||
|
||||
# Remove deprecated fields
|
||||
for old in YoutubeDL._deprecated_multivalue_fields.keys():
|
||||
for old in YoutubeDL._deprecated_multivalue_fields:
|
||||
test_info_dict.pop(old, None)
|
||||
|
||||
# release_year may be generated from release_date
|
||||
@@ -239,6 +259,20 @@ def sanitize(key, value):


def expect_info_dict(self, got_dict, expected_dict):
ALLOWED_KEYS_SORT_ORDER = (
# NB: Keep in sync with the docstring of extractor/common.py
'id', 'ext', 'direct', 'display_id', 'title', 'alt_title', 'description', 'media_type',
'uploader', 'uploader_id', 'uploader_url', 'channel', 'channel_id', 'channel_url', 'channel_is_verified',
'channel_follower_count', 'comment_count', 'view_count', 'concurrent_view_count',
'like_count', 'dislike_count', 'repost_count', 'average_rating', 'age_limit', 'duration', 'thumbnail', 'heatmap',
'chapters', 'chapter', 'chapter_number', 'chapter_id', 'start_time', 'end_time', 'section_start', 'section_end',
'categories', 'tags', 'cast', 'composers', 'artists', 'album_artists', 'creators', 'genres',
'track', 'track_number', 'track_id', 'album', 'album_type', 'disc_number',
'series', 'series_id', 'season', 'season_number', 'season_id', 'episode', 'episode_number', 'episode_id',
'timestamp', 'upload_date', 'release_timestamp', 'release_date', 'release_year', 'modified_timestamp', 'modified_date',
'playable_in_embed', 'availability', 'live_status', 'location', 'license', '_old_archive_ids',
)

expect_dict(self, got_dict, expected_dict)
# Check for the presence of mandatory fields
if got_dict.get('_type') not in ('playlist', 'multi_video'):
@@ -246,19 +280,25 @@ def expect_info_dict(self, got_dict, expected_dict):
if expected_dict.get('ext'):
mandatory_fields.extend(('url', 'ext'))
for key in mandatory_fields:
self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
self.assertTrue(got_dict.get(key), f'Missing mandatory field {key}')
# Check for mandatory fields that are automatically set by YoutubeDL
if got_dict.get('_type', 'video') == 'video':
for key in ['webpage_url', 'extractor', 'extractor_key']:
self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
self.assertTrue(got_dict.get(key), f'Missing field: {key}')

test_info_dict = sanitize_got_info_dict(got_dict)

missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
# Check for invalid/misspelled field names being returned by the extractor
invalid_keys = sorted(test_info_dict.keys() - ALLOWED_KEYS_SORT_ORDER)
self.assertFalse(invalid_keys, f'Invalid fields returned by the extractor: {", ".join(invalid_keys)}')

missing_keys = sorted(
test_info_dict.keys() - expected_dict.keys(),
key=lambda x: ALLOWED_KEYS_SORT_ORDER.index(x))
if missing_keys:
def _repr(v):
if isinstance(v, str):
return "'%s'" % v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n')
return "'{}'".format(v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n'))
elif isinstance(v, type):
return v.__name__
else:
@@ -275,8 +315,7 @@ def _repr(v):
write_string(info_dict_str.replace('\n', '\n        '), out=sys.stderr)
self.assertFalse(
missing_keys,
'Missing keys in test definition: %s' % (
', '.join(sorted(missing_keys))))
'Missing keys in test definition: {}'.format(', '.join(sorted(missing_keys))))


def assertRegexpMatches(self, text, regexp, msg=None):
@@ -285,9 +324,9 @@ def assertRegexpMatches(self, text, regexp, msg=None):
else:
m = re.match(regexp, text)
if not m:
note = 'Regexp didn\'t match: %r not found' % (regexp)
note = f'Regexp didn\'t match: {regexp!r} not found'
if len(text) < 1000:
note += ' in %r' % text
note += f' in {text!r}'
if msg is None:
msg = note
else:
@@ -310,7 +349,7 @@ def assertLessEqual(self, got, expected, msg=None):


def assertEqual(self, got, expected, msg=None):
if not (got == expected):
if got != expected:
if msg is None:
msg = f'{got!r} not equal to {expected!r}'
self.assertTrue(got == expected, msg)

@@ -36,7 +36,6 @@
"verbose": true,
"writedescription": false,
"writeinfojson": true,
"writeannotations": false,
"writelink": false,
"writeurllink": false,
"writewebloclink": false,

@@ -36,6 +36,18 @@ def do_GET(self):
self.send_header('Content-Type', 'text/html; charset=utf-8')
self.end_headers()
self.wfile.write(TEAPOT_RESPONSE_BODY.encode())
elif self.path == '/fake.m3u8':
self.send_response(200)
self.send_header('Content-Length', '1024')
self.end_headers()
self.wfile.write(1024 * b'\x00')
elif self.path == '/bipbop.m3u8':
with open('test/testdata/m3u8/bipbop_16x9.m3u8', 'rb') as f:
data = f.read()
self.send_response(200)
self.send_header('Content-Length', str(len(data)))
self.end_headers()
self.wfile.write(data)
else:
assert False

@@ -53,6 +65,18 @@ def setUp(self):
def test_ie_key(self):
self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)

def test_get_netrc_login_info(self):
for params in [
{'usenetrc': True, 'netrc_location': './test/testdata/netrc/netrc'},
{'netrc_cmd': f'{sys.executable} ./test/testdata/netrc/print_netrc.py'},
]:
ie = DummyIE(FakeYDL(params))
self.assertEqual(ie._get_netrc_login_info(netrc_machine='normal_use'), ('user', 'pass'))
self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_user'), ('', 'pass'))
self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_pass'), ('user', ''))
self.assertEqual(ie._get_netrc_login_info(netrc_machine='both_empty'), ('', ''))
self.assertEqual(ie._get_netrc_login_info(netrc_machine='nonexistent'), (None, None))

def test_html_search_regex(self):
html = '<p id="foo">Watch this <a href="http://www.youtube.com/watch?v=BaW_jenozKc">video</a></p>'
search = lambda re, *args: self.ie._html_search_regex(re, html, *args)
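The new `test_get_netrc_login_info` above exercises both the `netrc_location` and `netrc_cmd` code paths against a fixture file. As a hedged sketch of the behaviour it pins down — stdlib only, with the fixture path and machine names taken from the test, and the `(None, None)` fallback for unknown machines being the semantics the assertions imply:

```python
# Minimal sketch (not the yt-dlp implementation) of netrc-style credential
# lookup matching the assertions in the test above
import netrc

def get_netrc_login_info(netrc_file, machine):
    # netrc.netrc() raises if the file is missing or malformed; the test
    # expects (None, None) for a machine that is not listed
    try:
        info = netrc.netrc(netrc_file).authenticators(machine)
    except (OSError, netrc.NetrcParseError):
        return None, None
    if info is None:
        return None, None
    login, _account, password = info
    return login or '', password or ''

# e.g. get_netrc_login_info('./test/testdata/netrc/netrc', 'normal_use')
# -> ('user', 'pass')
```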
@@ -262,19 +286,19 @@ def test_search_json_ld_realworld(self):
''',
{
'chapters': [
{"title": "Explosie Turnhout", "start_time": 70, "end_time": 440},
{"title": "Jaarwisseling", "start_time": 440, "end_time": 1179},
{"title": "Natuurbranden Colorado", "start_time": 1179, "end_time": 1263},
{"title": "Klimaatverandering", "start_time": 1263, "end_time": 1367},
{"title": "Zacht weer", "start_time": 1367, "end_time": 1383},
{"title": "Financiële balans", "start_time": 1383, "end_time": 1484},
{"title": "Club Brugge", "start_time": 1484, "end_time": 1575},
{"title": "Mentale gezondheid bij topsporters", "start_time": 1575, "end_time": 1728},
{"title": "Olympische Winterspelen", "start_time": 1728, "end_time": 1873},
{"title": "Sober oudjaar in Nederland", "start_time": 1873, "end_time": 2079.23}
{'title': 'Explosie Turnhout', 'start_time': 70, 'end_time': 440},
{'title': 'Jaarwisseling', 'start_time': 440, 'end_time': 1179},
{'title': 'Natuurbranden Colorado', 'start_time': 1179, 'end_time': 1263},
{'title': 'Klimaatverandering', 'start_time': 1263, 'end_time': 1367},
{'title': 'Zacht weer', 'start_time': 1367, 'end_time': 1383},
{'title': 'Financiële balans', 'start_time': 1383, 'end_time': 1484},
{'title': 'Club Brugge', 'start_time': 1484, 'end_time': 1575},
{'title': 'Mentale gezondheid bij topsporters', 'start_time': 1575, 'end_time': 1728},
{'title': 'Olympische Winterspelen', 'start_time': 1728, 'end_time': 1873},
{'title': 'Sober oudjaar in Nederland', 'start_time': 1873, 'end_time': 2079.23},
],
'title': 'Het journaal - Aflevering 365 (Seizoen 2021)'
}, {}
'title': 'Het journaal - Aflevering 365 (Seizoen 2021)',
}, {},
),
(
# test multiple thumbnails in a list
@@ -301,13 +325,27 @@ def test_search_json_ld_realworld(self):
'thumbnails': [{'url': 'https://www.rainews.it/cropgd/640x360/dl/img/2021/12/30/1640886376927_GettyImages.jpg'}],
},
{},
)
),
(
# test thumbnail_url key without URL scheme
r'''
<script type="application/ld+json">
{
"@context": "https://schema.org",
"@type": "VideoObject",
"thumbnail_url": "//www.nobelprize.org/images/12693-landscape-medium-gallery.jpg"
}</script>''',
{
'thumbnails': [{'url': 'https://www.nobelprize.org/images/12693-landscape-medium-gallery.jpg'}],
},
{},
),
]
for html, expected_dict, search_json_ld_kwargs in _TESTS:
expect_dict(
self,
self.ie._search_json_ld(html, None, **search_json_ld_kwargs),
expected_dict
expected_dict,
)

def test_download_json(self):
@@ -366,7 +404,7 @@ def test_parse_html5_media_entries(self):
'height': 740,
'tbr': 1500,
}],
'thumbnail': '//pics.r18.com/digital/amateur/mgmr105/mgmr105jp.jpg'
'thumbnail': '//pics.r18.com/digital/amateur/mgmr105/mgmr105jp.jpg',
})

# from https://www.csfd.cz/
@@ -419,9 +457,9 @@ def test_parse_html5_media_entries(self):
'height': 1080,
}],
'subtitles': {
'cs': [{'url': 'https://video.csfd.cz/files/subtitles/163/344/163344115_4c388b.srt'}]
'cs': [{'url': 'https://video.csfd.cz/files/subtitles/163/344/163344115_4c388b.srt'}],
},
'thumbnail': 'https://img.csfd.cz/files/images/film/video/preview/163/344/163344118_748d20.png?h360'
'thumbnail': 'https://img.csfd.cz/files/images/film/video/preview/163/344/163344118_748d20.png?h360',
})

# from https://tamasha.com/v/Kkdjw
@@ -452,7 +490,7 @@ def test_parse_html5_media_entries(self):
'ext': 'mp4',
'format_id': '144p',
'height': 144,
}]
}],
})

# from https://www.directvnow.com
@@ -470,7 +508,7 @@ def test_parse_html5_media_entries(self):
'formats': [{
'ext': 'mp4',
'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
}]
}],
})

# from https://www.directvnow.com
@@ -488,7 +526,7 @@ def test_parse_html5_media_entries(self):
'formats': [{
'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
'ext': 'mp4',
}]
}],
})

# from https://www.klarna.com/uk/
@@ -547,8 +585,8 @@ def test_extract_jwplayer_data_realworld(self):
'id': 'XEgvuql4',
'formats': [{
'url': 'rtmp://192.138.214.154/live/sjclive',
'ext': 'flv'
}]
'ext': 'flv',
}],
})

# from https://www.pornoxo.com/videos/7564/striptease-from-sexy-secretary/
@@ -588,8 +626,8 @@ def test_extract_jwplayer_data_realworld(self):
'thumbnail': 'https://t03.vipstreamservice.com/thumbs/pxo-full/2009-12/14/a4b2157147afe5efa93ce1978e0265289c193874e02597.flv-full-13.jpg',
'formats': [{
'url': 'https://cdn.pornoxo.com/key=MF+oEbaxqTKb50P-w9G3nA,end=1489689259,ip=104.199.146.27/ip=104.199.146.27/speed=6573765/buffer=3.0/2009-12/4b2157147afe5efa93ce1978e0265289c193874e02597.flv',
'ext': 'flv'
}]
'ext': 'flv',
}],
})

# from http://www.indiedb.com/games/king-machine/videos
@@ -610,12 +648,12 @@ def test_extract_jwplayer_data_realworld(self):
'formats': [{
'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode_mp4/king-machine-trailer.mp4',
'height': 360,
'ext': 'mp4'
'ext': 'mp4',
}, {
'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode720p_mp4/king-machine-trailer.mp4',
'height': 720,
'ext': 'mp4'
}]
'ext': 'mp4',
}],
})

def test_parse_m3u8_formats(self):
@@ -626,6 +664,7 @@ def test_parse_m3u8_formats(self):
'img_bipbop_adv_example_fmp4',
'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
[{
# 60kbps (bitrate not provided in m3u8); sorted as worst because it's grouped with lowest bitrate video track
'format_id': 'aud1-English',
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a1/prog_index.m3u8',
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
@@ -633,15 +672,9 @@ def test_parse_m3u8_formats(self):
'ext': 'mp4',
'protocol': 'm3u8_native',
'audio_ext': 'mp4',
'source_preference': 0,
}, {
'format_id': 'aud2-English',
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
'language': 'en',
'ext': 'mp4',
'protocol': 'm3u8_native',
'audio_ext': 'mp4',
}, {
# 192kbps (bitrate not provided in m3u8)
'format_id': 'aud3-English',
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a3/prog_index.m3u8',
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
@@ -649,6 +682,17 @@ def test_parse_m3u8_formats(self):
'ext': 'mp4',
'protocol': 'm3u8_native',
'audio_ext': 'mp4',
'source_preference': 1,
}, {
# 384kbps (bitrate not provided in m3u8); sorted as best because it's grouped with the highest bitrate video track
'format_id': 'aud2-English',
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
'language': 'en',
'ext': 'mp4',
'protocol': 'm3u8_native',
'audio_ext': 'mp4',
'source_preference': 2,
}, {
'format_id': '530',
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v2/prog_index.m3u8',
@@ -866,7 +910,7 @@ def test_parse_m3u8_formats(self):
'height': 1080,
'vcodec': 'avc1.64002a',
}],
{}
{},
),
(
'bipbop_16x9',
@@ -990,45 +1034,45 @@ def test_parse_m3u8_formats(self):
'en': [{
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/eng/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}, {
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/eng_forced/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}],
'fr': [{
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/fra/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}, {
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/fra_forced/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}],
'es': [{
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/spa/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}, {
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/spa_forced/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}],
'ja': [{
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/jpn/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}, {
'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/subtitles/jpn_forced/prog_index.m3u8',
'ext': 'vtt',
'protocol': 'm3u8_native'
'protocol': 'm3u8_native',
}],
}
},
),
]

for m3u8_file, m3u8_url, expected_formats, expected_subs in _TEST_CASES:
with open('./test/testdata/m3u8/%s.m3u8' % m3u8_file, encoding='utf-8') as f:
with open(f'./test/testdata/m3u8/{m3u8_file}.m3u8', encoding='utf-8') as f:
formats, subs = self.ie._parse_m3u8_formats_and_subtitles(
f.read(), m3u8_url, ext='mp4')
self.ie._sort_formats(formats)
@@ -1366,14 +1410,14 @@ def test_parse_mpd_formats(self):
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
'protocol': 'http_dash_segments',
}
]
},
],
},
)
),
]

for mpd_file, mpd_url, mpd_base_url, expected_formats, expected_subtitles in _TEST_CASES:
with open('./test/testdata/mpd/%s.mpd' % mpd_file, encoding='utf-8') as f:
with open(f'./test/testdata/mpd/{mpd_file}.mpd', encoding='utf-8') as f:
formats, subtitles = self.ie._parse_mpd_formats_and_subtitles(
compat_etree_fromstring(f.read().encode()),
mpd_base_url=mpd_base_url, mpd_url=mpd_url)
@@ -1408,7 +1452,7 @@ def test_parse_ism_formats(self):
'sampling_rate': 48000,
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video-100',
@@ -1431,7 +1475,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401FDA0544EFFC2D002CBC40000003004000000C03C60CA80000000168EF32C8',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video-326',
@@ -1454,7 +1498,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401FDA0241FE23FFC3BC83BA44000003000400000300C03C60CA800000000168EF32C8',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video-698',
@@ -1477,7 +1521,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401FDA0350BFB97FF06AF06AD1000003000100000300300F1832A00000000168EF32C8',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video-1493',
@@ -1500,7 +1544,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401FDA011C3DE6FFF0D890D871000003000100000300300F1832A00000000168EF32C8',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video-4482',
@@ -1523,7 +1567,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401FDA01A816F97FFC1ABC1AB440000003004000000C03C60CA80000000168EF32C8',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}],
{
@@ -1538,10 +1582,10 @@ def test_parse_ism_formats(self):
'duration': 8880746666,
'timescale': 10000000,
'fourcc': 'TTML',
'codec_private_data': ''
}
}
]
'codec_private_data': '',
},
},
],
},
),
(
@@ -1571,7 +1615,7 @@ def test_parse_ism_formats(self):
'sampling_rate': 48000,
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'audio_deu_1-224',
@@ -1597,7 +1641,7 @@ def test_parse_ism_formats(self):
'sampling_rate': 48000,
'channels': 6,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-23',
@@ -1622,7 +1666,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-403',
@@ -1647,7 +1691,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-680',
@@ -1672,7 +1716,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-1253',
@@ -1698,7 +1742,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-2121',
@@ -1723,7 +1767,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-3275',
@@ -1748,7 +1792,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-5300',
@@ -1773,7 +1817,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}, {
'format_id': 'video_deu-8079',
@@ -1798,7 +1842,7 @@ def test_parse_ism_formats(self):
'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
'channels': 2,
'bits_per_sample': 16,
'nal_unit_length_field': 4
'nal_unit_length_field': 4,
},
}],
{},
@@ -1806,7 +1850,7 @@ def test_parse_ism_formats(self):
]

for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
with open('./test/testdata/ism/%s.Manifest' % ism_file, encoding='utf-8') as f:
with open(f'./test/testdata/ism/{ism_file}.Manifest', encoding='utf-8') as f:
formats, subtitles = self.ie._parse_ism_formats_and_subtitles(
compat_etree_fromstring(f.read().encode()), ism_url=ism_url)
self.ie._sort_formats(formats)
@@ -1827,12 +1871,12 @@ def test_parse_f4m_formats(self):
'tbr': 2148,
'width': 1280,
'height': 720,
}]
}],
),
]

for f4m_file, f4m_url, expected_formats in _TEST_CASES:
with open('./test/testdata/f4m/%s.f4m' % f4m_file, encoding='utf-8') as f:
with open(f'./test/testdata/f4m/{f4m_file}.f4m', encoding='utf-8') as f:
formats = self.ie._parse_f4m_formats(
compat_etree_fromstring(f.read().encode()),
f4m_url, None)
@@ -1873,13 +1917,13 @@ def test_parse_xspf(self):
}, {
'manifest_url': 'https://example.org/src/foo_xspf.xspf',
'url': 'https://example.com/track3.mp3',
}]
}]
}],
}],
),
]

for xspf_file, xspf_url, expected_entries in _TEST_CASES:
with open('./test/testdata/xspf/%s.xspf' % xspf_file, encoding='utf-8') as f:
with open(f'./test/testdata/xspf/{xspf_file}.xspf', encoding='utf-8') as f:
entries = self.ie._parse_xspf(
compat_etree_fromstring(f.read().encode()),
xspf_file, xspf_url=xspf_url, xspf_base_url=xspf_url)
@@ -1901,8 +1945,8 @@ def test_response_with_expected_status_returns_content(self):
server_thread.daemon = True
server_thread.start()

(content, urlh) = self.ie._download_webpage_handle(
'http://127.0.0.1:%d/teapot' % port, None,
content, _ = self.ie._download_webpage_handle(
f'http://127.0.0.1:{port}/teapot', None,
expected_status=TEAPOT_RESPONSE_STATUS)
self.assertEqual(content, TEAPOT_RESPONSE_BODY)

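The rewritten teapot assertion above relies on `expected_status`, which lets `_download_webpage_handle` treat a normally fatal HTTP status (here 418) as a valid response whose body is still returned. A hedged stdlib-only sketch of that idea — the URL and status values mirror the test, nothing here is the yt-dlp implementation:

```python
# Sketch: accept selected non-2xx statuses as valid responses, as
# expected_status=TEAPOT_RESPONSE_STATUS does in the test above
import urllib.error
import urllib.request

def download_webpage(url, expected_status=()):
    try:
        with urllib.request.urlopen(url) as resp:
            return resp.read().decode()
    except urllib.error.HTTPError as e:
        if e.code in expected_status:
            return e.read().decode()  # e.g. 418 "I'm a teapot" body
        raise

# content = download_webpage(f'http://127.0.0.1:{port}/teapot', expected_status=(418,))
```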
@@ -1915,6 +1959,208 @@ def test_search_nextjs_data(self):
with self.assertWarns(DeprecationWarning):
self.assertEqual(self.ie._search_nextjs_data('', None, default='{}'), {})

def test_search_nextjs_v13_data(self):
HTML = R'''
<script>(self.__next_f=self.__next_f||[]).push([0])</script>
<script>self.__next_f.push([2,"0:[\"$\",\"$L0\",null,{\"do_not_add_this\":\"fail\"}]\n"])</script>
<script>self.__next_f.push([1,"1:I[46975,[],\"HTTPAccessFallbackBoundary\"]\n2:I[32630,[\"8183\",\"static/chunks/8183-768193f6a9e33cdd.js\"]]\n"])</script>
<script nonce="abc123">self.__next_f.push([1,"e:[false,[\"$\",\"div\",null,{\"children\":[\"$\",\"$L18\",null,{\"foo\":\"bar\"}]}],false]\n "])</script>
<script>self.__next_f.push([1,"2a:[[\"$\",\"div\",null,{\"className\":\"flex flex-col\",\"children\":[]}],[\"$\",\"$L16\",null,{\"meta\":{\"dateCreated\":1730489700,\"uuid\":\"40cac41d-8d29-4ef5-aa11-75047b9f0907\"}}]]\n"])</script>
<script>self.__next_f.push([1,"df:[\"$undefined\",[\"$\",\"div\",null,{\"children\":[\"$\",\"$L17\",null,{}],\"do_not_include_this_field\":\"fail\"}],[\"$\",\"div\",null,{\"children\":[[\"$\",\"$L19\",null,{\"duplicated_field_name\":{\"x\":1}}],[\"$\",\"$L20\",null,{\"duplicated_field_name\":{\"y\":2}}]]}],\"$undefined\"]\n"])</script>
<script>self.__next_f.push([3,"MzM6WyIkIiwiJEwzMiIsbnVsbCx7ImRlY29kZWQiOiJzdWNjZXNzIn1d"])</script>
'''
EXPECTED = {
'18': {
'foo': 'bar',
},
'16': {
'meta': {
'dateCreated': 1730489700,
'uuid': '40cac41d-8d29-4ef5-aa11-75047b9f0907',
},
},
'19': {
'duplicated_field_name': {'x': 1},
},
'20': {
'duplicated_field_name': {'y': 2},
},
}
self.assertEqual(self.ie._search_nextjs_v13_data(HTML, None), EXPECTED)
self.assertEqual(self.ie._search_nextjs_v13_data('', None, fatal=False), {})
self.assertEqual(self.ie._search_nextjs_v13_data(None, None, fatal=False), {})

def test_search_nuxt_json(self):
HTML_TMPL = '<script data-ssr="true" id="__NUXT_DATA__" type="application/json">[{}]</script>'
VALID_DATA = '''
["ShallowReactive",1],
{"data":2,"state":21,"once":25,"_errors":28,"_server_errors":30},
["ShallowReactive",3],
{"$abcdef123456":4},
{"podcast":5,"activeEpisodeData":7},
{"podcast":6,"seasons":14},
{"title":10,"id":11},
["Reactive",8],
{"episode":9,"creators":18,"empty_list":20},
{"title":12,"id":13,"refs":34,"empty_refs":35},
"Series Title",
"podcast-id-01",
"Episode Title",
"episode-id-99",
[15,16,17],
1,
2,
3,
[19],
"Podcast Creator",
[],
{"$ssite-config":22},
{"env":23,"name":24,"map":26,"numbers":14},
"production",
"podcast-website",
["Set"],
["Reactive",27],
["Map"],
["ShallowReactive",29],
{},
["NuxtError",31],
{"status":32,"message":33},
503,
"Service Unavailable",
[36,37],
[38,39],
["Ref",40],
["ShallowRef",41],
["EmptyRef",42],
["EmptyShallowRef",43],
"ref",
"shallow_ref",
"{\\"ref\\":1}",
"{\\"shallow_ref\\":2}"
'''
PAYLOAD = {
'data': {
'$abcdef123456': {
'podcast': {
'podcast': {
'title': 'Series Title',
'id': 'podcast-id-01',
},
'seasons': [1, 2, 3],
},
'activeEpisodeData': {
'episode': {
'title': 'Episode Title',
'id': 'episode-id-99',
'refs': ['ref', 'shallow_ref'],
'empty_refs': [{'ref': 1}, {'shallow_ref': 2}],
},
'creators': ['Podcast Creator'],
'empty_list': [],
},
},
},
'state': {
'$ssite-config': {
'env': 'production',
'name': 'podcast-website',
'map': [],
'numbers': [1, 2, 3],
},
},
'once': [],
'_errors': {},
'_server_errors': {
'status': 503,
'message': 'Service Unavailable',
},
}
PARTIALLY_INVALID = [(
'''
{"data":1},
{"invalid_raw_list":2},
[15,16,17]
''',
{'data': {'invalid_raw_list': [None, None, None]}},
), (
'''
{"data":1},
["EmptyRef",2],
"not valid JSON"
''',
{'data': None},
), (
'''
{"data":1},
["EmptyShallowRef",2],
"not valid JSON"
''',
{'data': None},
)]
INVALID = [
'''
[]
''',
'''
["unsupported",1],
{"data":2},
{}
''',
]
DEFAULT = object()

self.assertEqual(self.ie._search_nuxt_json(HTML_TMPL.format(VALID_DATA), None), PAYLOAD)
self.assertEqual(self.ie._search_nuxt_json('', None, fatal=False), {})
self.assertIs(self.ie._search_nuxt_json('', None, default=DEFAULT), DEFAULT)

for data, expected in PARTIALLY_INVALID:
self.assertEqual(
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, fatal=False), expected)

for data in INVALID:
self.assertIs(
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, default=DEFAULT), DEFAULT)


class TestInfoExtractorNetwork(unittest.TestCase):
def setUp(self, /):
self.httpd = http.server.HTTPServer(
('127.0.0.1', 0), InfoExtractorTestRequestHandler)
self.port = http_server_port(self.httpd)

self.server_thread = threading.Thread(target=self.httpd.serve_forever)
self.server_thread.daemon = True
self.server_thread.start()

self.called = False

def require_warning(*args, **kwargs):
self.called = True

self.ydl = FakeYDL()
self.ydl.report_warning = require_warning
self.ie = DummyIE(self.ydl)

def tearDown(self, /):
self.ydl.close()
self.httpd.shutdown()
self.httpd.server_close()
self.server_thread.join(1)

def test_extract_m3u8_formats(self):
formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
f'http://127.0.0.1:{self.port}/bipbop.m3u8', None, fatal=False)
self.assertFalse(self.called)
self.assertTrue(formats)
self.assertTrue(subtitles)

def test_extract_m3u8_formats_warning(self):
formats, subtitles = self.ie._extract_m3u8_formats_and_subtitles(
f'http://127.0.0.1:{self.port}/fake.m3u8', None, fatal=False)
self.assertTrue(self.called, 'Warning was not issued for binary m3u8 file')
self.assertFalse(formats)
self.assertFalse(subtitles)


if __name__ == '__main__':
unittest.main()

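The new `test_search_nextjs_v13_data` above decodes Next.js 13+ "flight" payloads: each `self.__next_f.push([1, "..."])` chunk carries newline-separated `id:json` rows, and the extractor collects the props of `["$", "$L<id>", null, {...}]` component nodes. A rough sketch of that decoding, under the assumption that the HTML has the same shape as the test fixture (this is illustrative, not the yt-dlp implementation, and it skips the fixture's exclusion rules):

```python
# Rough sketch of pulling component props out of Next.js flight chunks
import json
import re

def search_nextjs_v13_data(html):
    result = {}
    # only [1, "..."] chunks hold flight rows; [0]/[2]/[3] chunks are skipped
    for chunk in re.findall(r'self\.__next_f\.push\(\[1,\s*("(?:\\.|[^"\\])*")\]\)', html):
        for row in json.loads(chunk).split('\n'):
            key, sep, data = row.partition(':')
            if not sep or not data.startswith('['):
                continue  # e.g. "1:I[...]" module-reference rows
            try:
                element = json.loads(data)
            except json.JSONDecodeError:
                continue
            # walk ["$", "$L18", None, {props}] nodes, keyed by their "L" id
            stack = [element]
            while stack:
                node = stack.pop()
                if isinstance(node, list):
                    if len(node) == 4 and node[0] == '$' and isinstance(node[3], dict):
                        ref = str(node[1])
                        if ref.startswith('$L'):
                            result[ref[2:]] = node[3]
                        stack.extend(node[3].values())
                    else:
                        stack.extend(node)
    return result
```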
@@ -4,17 +4,19 @@
import os
import sys
import unittest
from unittest.mock import patch

from yt_dlp.globals import all_plugins_loaded

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import contextlib
import copy
import json

from test.helper import FakeYDL, assertRegexpMatches, try_rm
from yt_dlp import YoutubeDL
from yt_dlp.compat import compat_os_name
from yt_dlp.extractor import YoutubeIE
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.postprocessor.common import PostProcessor
from yt_dlp.utils import (
@@ -129,8 +131,8 @@ def test(inp, *expected, multi=False):
'allow_multiple_audio_streams': multi,
})
ydl.process_ie_result(info_dict.copy())
downloaded = map(lambda x: x['format_id'], ydl.downloaded_info_dicts)
self.assertEqual(list(downloaded), list(expected))
downloaded = [x['format_id'] for x in ydl.downloaded_info_dicts]
self.assertEqual(downloaded, list(expected))

test('20/47', '47')
test('20/71/worst', '35')
@@ -234,6 +236,35 @@ def test_format_selection_video(self):
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')

def test_format_selection_by_vcodec_sort(self):
formats = [
{'format_id': 'av1-format', 'ext': 'mp4', 'vcodec': 'av1', 'acodec': 'none', 'url': TEST_URL},
{'format_id': 'vp9-hdr-format', 'ext': 'mp4', 'vcodec': 'vp09.02.50.10.01.09.18.09.00', 'acodec': 'none', 'url': TEST_URL},
{'format_id': 'vp9-sdr-format', 'ext': 'mp4', 'vcodec': 'vp09.00.50.08', 'acodec': 'none', 'url': TEST_URL},
{'format_id': 'h265-format', 'ext': 'mp4', 'vcodec': 'h265', 'acodec': 'none', 'url': TEST_URL},
]
info_dict = _make_result(formats)

ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9.2']})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')

ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9']})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')

ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9.2']})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')

ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9']})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')

def test_format_selection_string_ops(self):
formats = [
{'format_id': 'abc-cba', 'ext': 'mp4', 'url': TEST_URL},
@@ -304,99 +335,6 @@ def test_format_selection_string_ops(self):
ydl = YDL({'format': '[format_id!*=-]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())

def test_youtube_format_selection(self):
# FIXME: Rewrite in accordance with the new format sorting options
return

order = [
'38', '37', '46', '22', '45', '35', '44', '18', '34', '43', '6', '5', '17', '36', '13',
# Apple HTTP Live Streaming
'96', '95', '94', '93', '92', '132', '151',
# 3D
'85', '84', '102', '83', '101', '82', '100',
# Dash video
'137', '248', '136', '247', '135', '246',
'245', '244', '134', '243', '133', '242', '160',
# Dash audio
'141', '172', '140', '171', '139',
]

def format_info(f_id):
info = YoutubeIE._formats[f_id].copy()

# XXX: In real cases InfoExtractor._parse_mpd_formats() fills up 'acodec'
# and 'vcodec', while in tests such information is incomplete since
# commit a6c2c24479e5f4827ceb06f64d855329c0a6f593
# test_YoutubeDL.test_youtube_format_selection is broken without
# this fix
if 'acodec' in info and 'vcodec' not in info:
info['vcodec'] = 'none'
elif 'vcodec' in info and 'acodec' not in info:
info['acodec'] = 'none'

info['format_id'] = f_id
info['url'] = 'url:' + f_id
return info
formats_order = [format_info(f_id) for f_id in order]

info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo+bestaudio'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '248+172')
self.assertEqual(downloaded['ext'], 'mp4')

info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '38')

info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo/best,bestaudio'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['137', '141'])

info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['137+141', '248+141'])

info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['136+141', '247+141'])

info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['248+141'])

for f1, f2 in zip(formats_order, formats_order[1:]):
info_dict = _make_result([f1, f2], extractor='youtube')
ydl = YDL({'format': 'best/bestvideo'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], f1['format_id'])

info_dict = _make_result([f2, f1], extractor='youtube')
ydl = YDL({'format': 'best/bestvideo'})
ydl.sort_formats(info_dict)
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], f1['format_id'])

def test_audio_only_extractor_format_selection(self):
# For extractors with incomplete formats (all formats are audio-only or
# video-only) best and worst should fallback to corresponding best/worst
@@ -456,11 +394,11 @@ def assert_syntax_error(format_spec):

def test_format_filtering(self):
formats = [
{'format_id': 'A', 'filesize': 500, 'width': 1000},
{'format_id': 'B', 'filesize': 1000, 'width': 500},
{'format_id': 'C', 'filesize': 1000, 'width': 400},
{'format_id': 'D', 'filesize': 2000, 'width': 600},
{'format_id': 'E', 'filesize': 3000},
{'format_id': 'A', 'filesize': 500, 'width': 1000, 'aspect_ratio': 1.0},
{'format_id': 'B', 'filesize': 1000, 'width': 500, 'aspect_ratio': 1.33},
{'format_id': 'C', 'filesize': 1000, 'width': 400, 'aspect_ratio': 1.5},
{'format_id': 'D', 'filesize': 2000, 'width': 600, 'aspect_ratio': 1.78},
{'format_id': 'E', 'filesize': 3000, 'aspect_ratio': 0.56},
{'format_id': 'F'},
{'format_id': 'G', 'filesize': 1000000},
]
@@ -515,13 +453,62 @@ def test_format_filtering(self):
self.assertEqual(downloaded_ids, ['D', 'C', 'B'])

ydl = YDL({'format': 'best[height<40]'})
try:
with contextlib.suppress(ExtractorError):
ydl.process_ie_result(info_dict)
except ExtractorError:
pass
self.assertEqual(ydl.downloaded_info_dicts, [])

def test_default_format_spec(self):
ydl = YDL({'format': 'best[aspect_ratio=1]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'A')

ydl = YDL({'format': 'all[aspect_ratio > 1.00]'})
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['D', 'C', 'B'])

ydl = YDL({'format': 'all[aspect_ratio < 1.00]'})
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['E'])

ydl = YDL({'format': 'best[aspect_ratio=1.5]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'C')

ydl = YDL({'format': 'all[aspect_ratio!=1]'})
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['E', 'D', 'C', 'B'])

@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', False)
def test_default_format_spec_without_ffmpeg(self):
ydl = YDL({})
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')

ydl = YDL({'simulate': True})
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')

ydl = YDL({})
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')

ydl = YDL({'simulate': True})
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')

ydl = YDL({'outtmpl': '-'})
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')

ydl = YDL({})
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')

@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', True)
@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.can_merge', lambda _: True)
def test_default_format_spec_with_ffmpeg(self):
ydl = YDL({})
self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')

ydl = YDL({'simulate': True})
self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')

@@ -529,13 +516,13 @@ def test_default_format_spec(self):
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')

ydl = YDL({'simulate': True})
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'bestvideo*+bestaudio/best')
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')

ydl = YDL({'outtmpl': '-'})
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')

ydl = YDL({})
self.assertEqual(ydl._default_format_spec({}, download=False), 'bestvideo*+bestaudio/best')
self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')


@@ -652,8 +639,8 @@ def test_add_extra_info(self):
'formats': [
{'id': 'id 1', 'height': 1080, 'width': 1920},
{'id': 'id 2', 'height': 720},
{'id': 'id 3'}
]
{'id': 'id 3'},
],
}

def test_prepare_outtmpl_and_filename(self):
@@ -668,7 +655,7 @@ def test(tmpl, expected, *, info=None, **params):

if not isinstance(expected, (list, tuple)):
expected = (expected, expected)
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected, strict=True):
if callable(expect):
self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
elif expect is not None:
@@ -707,6 +694,13 @@ def test(tmpl, expected, *, info=None, **params):
test('%(width)06d.%%(ext)s', 'NA.%(ext)s')
test('%%(width)06d.%(ext)s', '%(width)06d.mp4')

# Sanitization options
test('%(title3)s', (None, 'foo⧸bar⧹test'))
test('%(title5)s', (None, 'aei_A'), restrictfilenames=True)
test('%(title3)s', (None, 'foo_bar_test'), windowsfilenames=False, restrictfilenames=True)
if sys.platform != 'win32':
test('%(title3)s', (None, 'foo⧸bar\\test'), windowsfilenames=False)

# ID sanitization
test('%(id)s', '_abcd', info={'id': '_abcd'})
test('%(some_id)s', '_abcd', info={'some_id': '_abcd'})
@@ -773,7 +767,7 @@ def expect_same_infodict(out):
test('%(formats)j', (json.dumps(FORMATS), None))
test('%(formats)#j', (
json.dumps(FORMATS, indent=4),
json.dumps(FORMATS, indent=4).replace(':', ':').replace('"', '"').replace('\n', ' ')
json.dumps(FORMATS, indent=4).replace(':', ':').replace('"', '"').replace('\n', ' '),
))
test('%(title5).3B', 'á')
test('%(title5)U', 'áéí 𝐀')
@@ -784,8 +778,8 @@ def expect_same_infodict(out):
test('%(filesize)#D', '1Ki')
test('%(height)5.2D', ' 1.08k')
test('%(title4)#S', 'foo_bar_test')
test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if compat_os_name == 'nt' else ' ')))
if compat_os_name == 'nt':
test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if os.name == 'nt' else ' ')))
if os.name == 'nt':
test('%(title4)q', ('"foo ""bar"" test"', None))
test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', None))
test('%(formats.0.id)#q', ('"id 1"', None))
@@ -843,14 +837,14 @@ def gen():

# Empty filename
test('%(foo|)s-%(bar|)s.%(ext)s', '-.mp4')
# test('%(foo|)s.%(ext)s', ('.mp4', '_.mp4')) # fixme
# test('%(foo|)s', ('', '_')) # fixme
# test('%(foo|)s.%(ext)s', ('.mp4', '_.mp4')) # FIXME: ?
# test('%(foo|)s', ('', '_')) # FIXME: ?

# Environment variable expansion for prepare_filename
os.environ['__yt_dlp_var'] = 'expanded'
envvar = '%__yt_dlp_var%' if compat_os_name == 'nt' else '$__yt_dlp_var'
envvar = '%__yt_dlp_var%' if os.name == 'nt' else '$__yt_dlp_var'
test(envvar, (envvar, 'expanded'))
if compat_os_name == 'nt':
if os.name == 'nt':
test('%s%', ('%s%', '%s%'))
os.environ['s'] = 'expanded'
test('%s%', ('%s%', 'expanded')) # %s% should be expanded before escaping %s
@@ -861,7 +855,7 @@ def gen():
test('Hello %(title1)s', 'Hello $PATH')
test('Hello %(title2)s', 'Hello %PATH%')
test('%(title3)s', ('foo/bar\\test', 'foo⧸bar⧹test'))
test('folder/%(title3)s', ('folder/foo/bar\\test', 'folder%sfoo⧸bar⧹test' % os.path.sep))
test('folder/%(title3)s', ('folder/foo/bar\\test', f'folder{os.path.sep}foo⧸bar⧹test'))

def test_format_note(self):
ydl = YoutubeDL()
@@ -883,22 +877,22 @@ def run(self, info):
f.write('EXAMPLE')
return [info['filepath']], info

def run_pp(params, PP):
def run_pp(params, pp):
with open(filename, 'w') as f:
f.write('EXAMPLE')
ydl = YoutubeDL(params)
ydl.add_post_processor(PP())
ydl.add_post_processor(pp())
ydl.post_process(filename, {'filepath': filename})

run_pp({'keepvideo': True}, SimplePP)
self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
self.assertTrue(os.path.exists(filename), f'{filename} doesn\'t exist')
self.assertTrue(os.path.exists(audiofile), f'{audiofile} doesn\'t exist')
os.unlink(filename)
os.unlink(audiofile)

run_pp({'keepvideo': False}, SimplePP)
self.assertFalse(os.path.exists(filename), '%s exists' % filename)
self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
self.assertFalse(os.path.exists(filename), f'{filename} exists')
self.assertTrue(os.path.exists(audiofile), f'{audiofile} doesn\'t exist')
os.unlink(audiofile)

class ModifierPP(PostProcessor):
@@ -908,7 +902,7 @@ def run(self, info):
return [], info

run_pp({'keepvideo': False}, ModifierPP)
self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
self.assertTrue(os.path.exists(filename), f'{filename} doesn\'t exist')
os.unlink(filename)

def test_match_filter(self):
@@ -920,7 +914,7 @@ def test_match_filter(self):
'duration': 30,
'filesize': 10 * 1024,
'playlist_id': '42',
'uploader': "變態妍字幕版 太妍 тест",
'uploader': '變態妍字幕版 太妍 тест',
'creator': "тест ' 123 ' тест--",
'webpage_url': 'http://example.com/watch?v=shenanigans',
}
@@ -933,7 +927,7 @@ def test_match_filter(self):
'description': 'foo',
'filesize': 5 * 1024,
'playlist_id': '43',
'uploader': "тест 123",
'uploader': 'тест 123',
'webpage_url': 'http://example.com/watch?v=SHENANIGANS',
}
videos = [first, second]
@@ -1059,7 +1053,7 @@ def test_selection(params, expected_ids, evaluate_all=False):
entries = func(evaluated)
results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
for v in get_downloaded_info_dicts(params, entries)]
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids, strict=True))), f'Entries of {name} for {params}')
self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')

test_selection({}, INDICES)
@@ -1180,7 +1174,7 @@ def _real_extract(self, url):
})
return {
'id': video_id,
'title': 'Video %s' % video_id,
'title': f'Video {video_id}',
'formats': formats,
}

@@ -1194,8 +1188,8 @@ def _entries(self):
'_type': 'url_transparent',
'ie_key': VideoIE.ie_key(),
'id': video_id,
'url': 'video:%s' % video_id,
'title': 'Video Transparent %s' % video_id,
'url': f'video:{video_id}',
'title': f'Video Transparent {video_id}',
}

def _real_extract(self, url):
@@ -1341,6 +1335,33 @@ def check_for_cookie_header(result):
self.assertFalse(result.get('cookies'), msg='Cookies set in cookies field for wrong domain')
self.assertFalse(ydl.cookiejar.get_cookie_header(fmt['url']), msg='Cookies set in cookiejar for wrong domain')

def test_load_plugins_compat(self):
# Should try to reload plugins if they haven't already been loaded
all_plugins_loaded.value = False
FakeYDL().close()
assert all_plugins_loaded.value

def test_close_hooks(self):
# Should call all registered close hooks on close
close_hook_called = False
close_hook_two_called = False

def close_hook():
nonlocal close_hook_called
close_hook_called = True

def close_hook_two():
nonlocal close_hook_two_called
close_hook_two_called = True

ydl = FakeYDL()
ydl.add_close_hook(close_hook)
ydl.add_close_hook(close_hook_two)

ydl.close()
self.assertTrue(close_hook_called, 'Close hook was not called')
self.assertTrue(close_hook_two_called, 'Close hook two was not called')


if __name__ == '__main__':
unittest.main()

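The new `test_format_selection_by_vcodec_sort` above pins down that a `vcodec:vp9.2` preference matches the `vp09.02.*` (HDR) profile while plain `vcodec:vp9` prefers the SDR `vp09.00.*` profile. The same preference can be expressed through the public API; a small sketch mirroring the test's parameters (the URL is a placeholder):

```python
# Sketch: the format_sort preference exercised by the test above,
# via the public YoutubeDL API; the video URL is a placeholder
from yt_dlp import YoutubeDL

opts = {
    'format': 'bestvideo',
    # prefer VP9.2 (HDR) video tracks, as in the test case
    'format_sort': ['vcodec:vp9.2'],
}
with YoutubeDL(opts) as ydl:
    info = ydl.extract_info('https://example.com/video', download=False)
    print(info.get('vcodec'))
```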
@@ -27,7 +27,6 @@
|
||||
pad_block,
|
||||
)
|
||||
from yt_dlp.dependencies import Cryptodome
|
||||
from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
|
||||
|
||||
# the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'
|
||||
|
||||
@@ -40,33 +39,33 @@ def setUp(self):
|
||||
def test_encrypt(self):
|
||||
msg = b'message'
|
||||
key = list(range(16))
|
||||
encrypted = aes_encrypt(bytes_to_intlist(msg), key)
|
||||
decrypted = intlist_to_bytes(aes_decrypt(encrypted, key))
|
||||
encrypted = aes_encrypt(list(msg), key)
|
||||
decrypted = bytes(aes_decrypt(encrypted, key))
|
||||
self.assertEqual(decrypted, msg)
|
||||
|
||||
def test_cbc_decrypt(self):
|
||||
data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
|
||||
decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
|
||||
decrypted = bytes(aes_cbc_decrypt(list(data), self.key, self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
if Cryptodome.AES:
|
||||
decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
|
||||
decrypted = aes_cbc_decrypt_bytes(data, bytes(self.key), bytes(self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
|
||||
def test_cbc_encrypt(self):
|
||||
data = bytes_to_intlist(self.secret_msg)
|
||||
encrypted = intlist_to_bytes(aes_cbc_encrypt(data, self.key, self.iv))
|
||||
data = list(self.secret_msg)
|
||||
encrypted = bytes(aes_cbc_encrypt(data, self.key, self.iv))
|
||||
self.assertEqual(
|
||||
encrypted,
|
||||
b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd')
|
||||
|
||||
def test_ctr_decrypt(self):
|
||||
data = bytes_to_intlist(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
|
||||
decrypted = intlist_to_bytes(aes_ctr_decrypt(data, self.key, self.iv))
|
||||
data = list(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
|
||||
decrypted = bytes(aes_ctr_decrypt(data, self.key, self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
|
||||
def test_ctr_encrypt(self):
|
||||
data = bytes_to_intlist(self.secret_msg)
|
||||
encrypted = intlist_to_bytes(aes_ctr_encrypt(data, self.key, self.iv))
|
||||
data = list(self.secret_msg)
|
||||
encrypted = bytes(aes_ctr_encrypt(data, self.key, self.iv))
|
||||
self.assertEqual(
|
||||
encrypted,
|
||||
b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
|
||||
@@ -75,47 +74,59 @@ def test_gcm_decrypt(self):
|
||||
data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f.\x08\xb4T\xe4/\x17\xbd'
|
||||
authentication_tag = b'\xe8&I\x80rI\x07\x9d}YWuU@:e'
|
||||
|
||||
decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
decrypted = bytes(aes_gcm_decrypt_and_verify(
list(data), self.key, list(authentication_tag), self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
if Cryptodome.AES:
decrypted = aes_gcm_decrypt_and_verify_bytes(
data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)

def test_gcm_aligned_decrypt(self):
data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f'
authentication_tag = b'\x08\xb1\x9d!&\x98\xd0\xeaRq\x90\xe6;\xb5]\xd8'

decrypted = bytes(aes_gcm_decrypt_and_verify(
list(data), self.key, list(authentication_tag), self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])
if Cryptodome.AES:
decrypted = aes_gcm_decrypt_and_verify_bytes(
data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])

def test_decrypt_text(self):
password = intlist_to_bytes(self.key).decode()
password = bytes(self.key).decode()
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae'
bytes(self.iv[:8])
+ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae',
).decode()
decrypted = (aes_decrypt_text(encrypted, password, 16))
self.assertEqual(decrypted, self.secret_msg)

password = intlist_to_bytes(self.key).decode()
password = bytes(self.key).decode()
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83'
bytes(self.iv[:8])
+ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83',
).decode()
decrypted = (aes_decrypt_text(encrypted, password, 32))
self.assertEqual(decrypted, self.secret_msg)

def test_ecb_encrypt(self):
data = bytes_to_intlist(self.secret_msg)
encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
data = list(self.secret_msg)
encrypted = bytes(aes_ecb_encrypt(data, self.key))
self.assertEqual(
encrypted,
b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')

def test_ecb_decrypt(self):
data = bytes_to_intlist(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
decrypted = intlist_to_bytes(aes_ecb_decrypt(data, self.key, self.iv))
data = list(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
decrypted = bytes(aes_ecb_decrypt(data, self.key, self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)

def test_key_expansion(self):
key = '4f6bdaa39e2f8cb07f5e722d9edef314'

self.assertEqual(key_expansion(bytes_to_intlist(bytearray.fromhex(key))), [
self.assertEqual(key_expansion(list(bytearray.fromhex(key))), [
0x4F, 0x6B, 0xDA, 0xA3, 0x9E, 0x2F, 0x8C, 0xB0, 0x7F, 0x5E, 0x72, 0x2D, 0x9E, 0xDE, 0xF3, 0x14,
0x53, 0x66, 0x20, 0xA8, 0xCD, 0x49, 0xAC, 0x18, 0xB2, 0x17, 0xDE, 0x35, 0x2C, 0xC9, 0x2D, 0x21,
0x8C, 0xBE, 0xDD, 0xD9, 0x41, 0xF7, 0x71, 0xC1, 0xF3, 0xE0, 0xAF, 0xF4, 0xDF, 0x29, 0x82, 0xD5,
@@ -132,16 +143,16 @@ def test_pad_block(self):
block = [0x21, 0xA0, 0x43, 0xFF]

self.assertEqual(pad_block(block, 'pkcs7'),
block + [0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C])
[*block, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C])

self.assertEqual(pad_block(block, 'iso7816'),
block + [0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
[*block, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])

self.assertEqual(pad_block(block, 'whitespace'),
block + [0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20])
[*block, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20])

self.assertEqual(pad_block(block, 'zero'),
block + [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
[*block, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])

block = list(range(16))
for mode in ('pkcs7', 'iso7816', 'whitespace', 'zero'):
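
The pattern across these test_aes.py hunks is mechanical: the bytes_to_intlist/intlist_to_bytes helpers are dropped in favour of Python 3 built-ins that already do the same conversions. A minimal standalone sketch of the equivalence:

# Iterating bytes yields ints in Python 3, so list() does what
# bytes_to_intlist() did, and bytes() does what intlist_to_bytes() did:
data = b'\x01\x02\x03'
as_ints = list(data)         # [1, 2, 3]
round_trip = bytes(as_ints)  # b'\x01\x02\x03'
assert round_trip == data
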
@@ -1,6 +1,7 @@
#!/usr/bin/env python3

# Allow direct execution
import datetime as dt
import os
import sys
import unittest
@@ -12,27 +13,19 @@

from yt_dlp import compat
from yt_dlp.compat import urllib  # isort: split
from yt_dlp.compat import (
compat_etree_fromstring,
compat_expanduser,
compat_urllib_parse_unquote,
compat_urllib_parse_urlencode,
)
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
from yt_dlp.compat.urllib.request import getproxies


class TestCompat(unittest.TestCase):
def test_compat_passthrough(self):
with self.assertWarns(DeprecationWarning):
compat.compat_basestring

with self.assertWarns(DeprecationWarning):
compat.WINDOWS_VT_MODE
_ = compat.compat_basestring

self.assertEqual(urllib.request.getproxies, getproxies)

with self.assertWarns(DeprecationWarning):
compat.compat_pycrypto_AES  # Must not raise error
_ = compat.compat_pycrypto_AES  # Must not raise error

def test_compat_expanduser(self):
old_home = os.environ.get('HOME')
@@ -43,39 +36,6 @@ def test_compat_expanduser(self):
finally:
os.environ['HOME'] = old_home or ''

def test_compat_urllib_parse_unquote(self):
self.assertEqual(compat_urllib_parse_unquote('abc%20def'), 'abc def')
self.assertEqual(compat_urllib_parse_unquote('%7e/abc+def'), '~/abc+def')
self.assertEqual(compat_urllib_parse_unquote(''), '')
self.assertEqual(compat_urllib_parse_unquote('%'), '%')
self.assertEqual(compat_urllib_parse_unquote('%%'), '%%')
self.assertEqual(compat_urllib_parse_unquote('%%%'), '%%%')
self.assertEqual(compat_urllib_parse_unquote('%2F'), '/')
self.assertEqual(compat_urllib_parse_unquote('%2f'), '/')
self.assertEqual(compat_urllib_parse_unquote('%E6%B4%A5%E6%B3%A2'), '津波')
self.assertEqual(
compat_urllib_parse_unquote('''<meta property="og:description" content="%E2%96%81%E2%96%82%E2%96%83%E2%96%84%25%E2%96%85%E2%96%86%E2%96%87%E2%96%88" />
%<a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B3%D9%88%D9%86%D8%A7%D9%85%D9%8A">%a'''),
'''<meta property="og:description" content="▁▂▃▄%▅▆▇█" />
%<a href="https://ar.wikipedia.org/wiki/تسونامي">%a''')
self.assertEqual(
compat_urllib_parse_unquote('''%28%5E%E2%97%A3_%E2%97%A2%5E%29%E3%81%A3%EF%B8%BB%E3%83%87%E2%95%90%E4%B8%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%86%B6%I%Break%25Things%'''),
'''(^◣_◢^)っ︻デ═一 ⇀ ⇀ ⇀ ⇀ ⇀ ↶%I%Break%Things%''')

def test_compat_urllib_parse_unquote_plus(self):
self.assertEqual(urllib.parse.unquote_plus('abc%20def'), 'abc def')
self.assertEqual(urllib.parse.unquote_plus('%7e/abc+def'), '~/abc def')

def test_compat_urllib_parse_urlencode(self):
self.assertEqual(compat_urllib_parse_urlencode({'abc': 'def'}), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode({'abc': b'def'}), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode({b'abc': 'def'}), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode({b'abc': b'def'}), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode([('abc', 'def')]), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode([('abc', b'def')]), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode([(b'abc', 'def')]), 'abc=def')
self.assertEqual(compat_urllib_parse_urlencode([(b'abc', b'def')]), 'abc=def')

def test_compat_etree_fromstring(self):
xml = '''
<root foo="bar" spam="中文">
@@ -100,6 +60,45 @@ def test_compat_etree_fromstring_doctype(self):
def test_struct_unpack(self):
self.assertEqual(struct.unpack('!B', b'\x00'), (0,))

def test_compat_datetime_from_timestamp(self):
self.assertEqual(
compat_datetime_from_timestamp(0),
dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(1),
dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(3600),
dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))

self.assertEqual(
compat_datetime_from_timestamp(-1),
dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(-86400),
dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))

self.assertEqual(
compat_datetime_from_timestamp(0.5),
dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(1.000001),
dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(-1.25),
dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))

self.assertEqual(
compat_datetime_from_timestamp(-1577923200),
dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
self.assertEqual(
compat_datetime_from_timestamp(4102444800),
dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))

self.assertEqual(
compat_datetime_from_timestamp(173568960000),
dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))
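
These boundary cases (negative, fractional, far-past and far-future timestamps) are exactly where dt.datetime.fromtimestamp() behaves platform-dependently; on Windows, for instance, pre-epoch values can raise OSError. A sketch of the kind of epoch-plus-timedelta fallback such a compat shim can use (an assumed implementation strategy, not a copy of yt-dlp's):

import datetime as dt

def datetime_from_timestamp(ts):
    # Anchoring at the epoch and adding a timedelta sidesteps the
    # platform-specific range limits of dt.datetime.fromtimestamp()
    epoch = dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc)
    return epoch + dt.timedelta(seconds=ts)

assert datetime_from_timestamp(-1.25) == dt.datetime(
    1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc)
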

if __name__ == '__main__':
unittest.main()

@@ -71,7 +71,7 @@ def _generate_expected_groups():
Path('/etc/yt-dlp.conf'),
Path('/etc/yt-dlp/config'),
Path('/etc/yt-dlp/config.txt'),
]
],
}


@@ -58,6 +58,14 @@ def test_get_desktop_environment(self):
({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),

({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),

({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'my_custom_de', 'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),

({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
@@ -67,6 +75,7 @@ def test_get_desktop_environment(self):
({'XDG_CURRENT_DESKTOP': 'GNOME'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'GNOME:GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'GNOME : GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'ubuntu:GNOME'}, _LinuxDesktopEnvironment.GNOME),

({'XDG_CURRENT_DESKTOP': 'Unity', 'DESKTOP_SESSION': 'gnome-fallback'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '5'}, _LinuxDesktopEnvironment.KDE5),
@@ -104,15 +113,31 @@ def test_chrome_cookie_decryptor_linux_v11(self):
decryptor = LinuxChromeCookieDecryptor('Chrome', Logger())
self.assertEqual(decryptor.decrypt(encrypted_value), value)

def test_chrome_cookie_decryptor_linux_v10_meta24(self):
with MonkeyPatch(cookies, {'_get_linux_keyring_password': lambda *args, **kwargs: b''}):
encrypted_value = b'v10\x1f\xe4\x0e[\x83\x0c\xcc*kPi \xce\x8d\x1d\xbb\x80\r\x11\t\xbb\x9e^Hy\x94\xf4\x963\x9f\x82\xba\xfe\xa1\xed\xb9\xf1)\x00710\x92\xc8/<\x96B'
value = 'DE'
decryptor = LinuxChromeCookieDecryptor('Chrome', Logger(), meta_version=24)
self.assertEqual(decryptor.decrypt(encrypted_value), value)

def test_chrome_cookie_decryptor_windows_v10(self):
with MonkeyPatch(cookies, {
'_get_windows_v10_key': lambda *args, **kwargs: b'Y\xef\xad\xad\xeerp\xf0Y\xe6\x9b\x12\xc2<z\x16]\n\xbb\xb8\xcb\xd7\x9bA\xc3\x14e\x99{\xd6\xf4&'
'_get_windows_v10_key': lambda *args, **kwargs: b'Y\xef\xad\xad\xeerp\xf0Y\xe6\x9b\x12\xc2<z\x16]\n\xbb\xb8\xcb\xd7\x9bA\xc3\x14e\x99{\xd6\xf4&',
}):
encrypted_value = b'v10T\xb8\xf3\xb8\x01\xa7TtcV\xfc\x88\xb8\xb8\xef\x05\xb5\xfd\x18\xc90\x009\xab\xb1\x893\x85)\x87\xe1\xa9-\xa3\xad='
value = '32101439'
decryptor = WindowsChromeCookieDecryptor('', Logger())
self.assertEqual(decryptor.decrypt(encrypted_value), value)

def test_chrome_cookie_decryptor_windows_v10_meta24(self):
with MonkeyPatch(cookies, {
'_get_windows_v10_key': lambda *args, **kwargs: b'\xea\x8b\x02\xc3\xc6\xc5\x99\xc3\xa3[ j\xfa\xf6\xfcU\xac\x13u\xdc\x0c\x0e\xf1\x03\x90\xb6\xdf\xbb\x8fL\xb1\xb2',
}):
encrypted_value = b'v10dN\xe1\xacy\x84^\xe1I\xact\x03r\xfb\xe2\xce{^\x0e<(\xb0y\xeb\x01\xfb@"\x9e\x8c\xa53~\xdb*\x8f\xac\x8b\xe3\xfd3\x06\xe5\x93\x19OyOG\xb2\xfb\x1d$\xc0\xda\x13j\x9e\xfe\xc5\xa3\xa8\xfe\xd9'
value = '1234'
decryptor = WindowsChromeCookieDecryptor('', Logger(), meta_version=24)
self.assertEqual(decryptor.decrypt(encrypted_value), value)

def test_chrome_cookie_decryptor_mac_v10(self):
with MonkeyPatch(cookies, {'_get_mac_keyring_password': lambda *args, **kwargs: b'6eIDUdtKAacvlHwBVwvg/Q=='}):
encrypted_value = b'v10\xb3\xbe\xad\xa1[\x9fC\xa1\x98\xe0\x9a\x01\xd9\xcf\xbfc'
@@ -121,17 +146,17 @@ def test_chrome_cookie_decryptor_mac_v10(self):
self.assertEqual(decryptor.decrypt(encrypted_value), value)

def test_safari_cookie_parsing(self):
cookies = \
b'cook\x00\x00\x00\x01\x00\x00\x00i\x00\x00\x01\x00\x01\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00Y' \
b'\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x008\x00\x00\x00B\x00\x00\x00F\x00\x00\x00H' \
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x03\xa5>\xc3A\x00\x00\x80\xc3\x07:\xc3A' \
b'localhost\x00foo\x00/\x00test%20%3Bcookie\x00\x00\x00\x054\x07\x17 \x05\x00\x00\x00Kbplist00\xd1\x01' \
b'\x02_\x10\x18NSHTTPCookieAcceptPolicy\x10\x02\x08\x0b&\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00' \
b'\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00('
cookies = (
b'cook\x00\x00\x00\x01\x00\x00\x00i\x00\x00\x01\x00\x01\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00Y'
b'\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x008\x00\x00\x00B\x00\x00\x00F\x00\x00\x00H'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x03\xa5>\xc3A\x00\x00\x80\xc3\x07:\xc3A'
b'localhost\x00foo\x00/\x00test%20%3Bcookie\x00\x00\x00\x054\x07\x17 \x05\x00\x00\x00Kbplist00\xd1\x01'
b'\x02_\x10\x18NSHTTPCookieAcceptPolicy\x10\x02\x08\x0b&\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00'
b'\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(')

jar = parse_safari_cookies(cookies)
self.assertEqual(len(jar), 1)
cookie = list(jar)[0]
cookie = next(iter(jar))
self.assertEqual(cookie.domain, 'localhost')
self.assertEqual(cookie.port, None)
self.assertEqual(cookie.path, '/')
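
For context on the fixture: the blob handed to parse_safari_cookies() reads like a minimal Safari Cookies.binarycookies payload, with the b'cook' magic followed by a big-endian page count, one cookie record (the domain/name/path/value strings are visible in the bytes), and a trailing bplist holding NSHTTPCookieAcceptPolicy. A quick check of that header reading (inferred from the fixture itself, not from an Apple spec):

import struct

header = b'cook\x00\x00\x00\x01'  # first 8 bytes of the fixture above
magic, num_pages = header[:4], struct.unpack('>I', header[4:])[0]
assert magic == b'cook' and num_pages == 1
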
@@ -164,7 +189,7 @@ def _run_tests(self, *cases):
attributes = {
key: value
for key, value in dict(morsel).items()
if value != ""
if value != ''
}
self.assertEqual(attributes, expected_attributes, message)

@@ -174,133 +199,133 @@ def test_parsing(self):
self._run_tests(
# Copied from https://github.com/python/cpython/blob/v3.10.7/Lib/test/test_http_cookies.py
(
"Test basic cookie",
"chips=ahoy; vienna=finger",
{"chips": "ahoy", "vienna": "finger"},
'Test basic cookie',
'chips=ahoy; vienna=finger',
{'chips': 'ahoy', 'vienna': 'finger'},
),
(
"Test quoted cookie",
'Test quoted cookie',
'keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"',
{"keebler": 'E=mc2; L="Loves"; fudge=\012;'},
{'keebler': 'E=mc2; L="Loves"; fudge=\012;'},
),
(
"Allow '=' in an unquoted value",
"keebler=E=mc2",
{"keebler": "E=mc2"},
'keebler=E=mc2',
{'keebler': 'E=mc2'},
),
(
"Allow cookies with ':' in their name",
"key:term=value:term",
{"key:term": "value:term"},
'key:term=value:term',
{'key:term': 'value:term'},
),
(
"Allow '[' and ']' in cookie values",
"a=b; c=[; d=r; f=h",
{"a": "b", "c": "[", "d": "r", "f": "h"},
'a=b; c=[; d=r; f=h',
{'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'},
),
(
"Test basic cookie attributes",
'Test basic cookie attributes',
'Customer="WILE_E_COYOTE"; Version=1; Path=/acme',
{"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})},
{'Customer': ('WILE_E_COYOTE', {'version': '1', 'path': '/acme'})},
),
(
"Test flag only cookie attributes",
'Test flag only cookie attributes',
'Customer="WILE_E_COYOTE"; HttpOnly; Secure',
{"Customer": ("WILE_E_COYOTE", {"httponly": True, "secure": True})},
{'Customer': ('WILE_E_COYOTE', {'httponly': True, 'secure': True})},
),
(
"Test flag only attribute with values",
"eggs=scrambled; httponly=foo; secure=bar; Path=/bacon",
{"eggs": ("scrambled", {"httponly": "foo", "secure": "bar", "path": "/bacon"})},
'Test flag only attribute with values',
'eggs=scrambled; httponly=foo; secure=bar; Path=/bacon',
{'eggs': ('scrambled', {'httponly': 'foo', 'secure': 'bar', 'path': '/bacon'})},
),
(
"Test special case for 'expires' attribute, 4 digit year",
'Customer="W"; expires=Wed, 01 Jan 2010 00:00:00 GMT',
{"Customer": ("W", {"expires": "Wed, 01 Jan 2010 00:00:00 GMT"})},
{'Customer': ('W', {'expires': 'Wed, 01 Jan 2010 00:00:00 GMT'})},
),
(
"Test special case for 'expires' attribute, 2 digit year",
'Customer="W"; expires=Wed, 01 Jan 98 00:00:00 GMT',
{"Customer": ("W", {"expires": "Wed, 01 Jan 98 00:00:00 GMT"})},
{'Customer': ('W', {'expires': 'Wed, 01 Jan 98 00:00:00 GMT'})},
),
(
"Test extra spaces in keys and values",
"eggs = scrambled ; secure ; path = bar ; foo=foo ",
{"eggs": ("scrambled", {"secure": True, "path": "bar"}), "foo": "foo"},
'Test extra spaces in keys and values',
'eggs = scrambled ; secure ; path = bar ; foo=foo ',
{'eggs': ('scrambled', {'secure': True, 'path': 'bar'}), 'foo': 'foo'},
),
(
"Test quoted attributes",
'Test quoted attributes',
'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"',
{"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})}
{'Customer': ('WILE_E_COYOTE', {'version': '1', 'path': '/acme'})},
),
# Our own tests that CPython passes
(
"Allow ';' in quoted value",
'chips="a;hoy"; vienna=finger',
{"chips": "a;hoy", "vienna": "finger"},
{'chips': 'a;hoy', 'vienna': 'finger'},
),
(
"Keep only the last set value",
"a=c; a=b",
{"a": "b"},
'Keep only the last set value',
'a=c; a=b',
{'a': 'b'},
),
)

def test_lenient_parsing(self):
self._run_tests(
(
"Ignore and try to skip invalid cookies",
'Ignore and try to skip invalid cookies',
'chips={"ahoy;": 1}; vienna="finger;"',
{"vienna": "finger;"},
{'vienna': 'finger;'},
),
(
"Ignore cookies without a name",
"a=b; unnamed; c=d",
{"a": "b", "c": "d"},
'Ignore cookies without a name',
'a=b; unnamed; c=d',
{'a': 'b', 'c': 'd'},
),
(
"Ignore '\"' cookie without name",
'a=b; "; c=d',
{"a": "b", "c": "d"},
{'a': 'b', 'c': 'd'},
),
(
"Skip all space separated values",
"x a=b c=d x; e=f",
{"a": "b", "c": "d", "e": "f"},
'Skip all space separated values',
'x a=b c=d x; e=f',
{'a': 'b', 'c': 'd', 'e': 'f'},
),
(
"Skip all space separated values",
'Skip all space separated values',
'x a=b; data={"complex": "json", "with": "key=value"}; x c=d x',
{"a": "b", "c": "d"},
{'a': 'b', 'c': 'd'},
),
(
"Expect quote mending",
'Expect quote mending',
'a=b; invalid="; c=d',
{"a": "b", "c": "d"},
{'a': 'b', 'c': 'd'},
),
(
"Reset morsel after invalid to not capture attributes",
"a=b; invalid; Version=1; c=d",
{"a": "b", "c": "d"},
'Reset morsel after invalid to not capture attributes',
'a=b; invalid; Version=1; c=d',
{'a': 'b', 'c': 'd'},
),
(
"Reset morsel after invalid to not capture attributes",
"a=b; $invalid; $Version=1; c=d",
{"a": "b", "c": "d"},
'Reset morsel after invalid to not capture attributes',
'a=b; $invalid; $Version=1; c=d',
{'a': 'b', 'c': 'd'},
),
(
"Continue after non-flag attribute without value",
"a=b; path; Version=1; c=d",
{"a": "b", "c": "d"},
'Continue after non-flag attribute without value',
'a=b; path; Version=1; c=d',
{'a': 'b', 'c': 'd'},
),
(
"Allow cookie attributes with `$` prefix",
'Allow cookie attributes with `$` prefix',
'Customer="WILE_E_COYOTE"; $Version=1; $Secure; $Path=/acme',
{"Customer": ("WILE_E_COYOTE", {"version": "1", "secure": True, "path": "/acme"})},
{'Customer': ('WILE_E_COYOTE', {'version': '1', 'secure': True, 'path': '/acme'})},
),
(
"Invalid Morsel keys should not result in an error",
"Key=Value; [Invalid]=Value; Another=Value",
{"Key": "Value", "Another": "Value"},
'Invalid Morsel keys should not result in an error',
'Key=Value; [Invalid]=Value; Another=Value',
{'Key': 'Value', 'Another': 'Value'},
),
)

test/test_devalue.py (new file, 235 lines)
@@ -0,0 +1,235 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import datetime as dt
import json
import math
import re
import unittest

from yt_dlp.utils.jslib import devalue


TEST_CASES_EQUALS = [{
'name': 'int',
'unparsed': [-42],
'parsed': -42,
}, {
'name': 'str',
'unparsed': ['woo!!!'],
'parsed': 'woo!!!',
}, {
'name': 'Number',
'unparsed': [['Object', 42]],
'parsed': 42,
}, {
'name': 'String',
'unparsed': [['Object', 'yar']],
'parsed': 'yar',
}, {
'name': 'Infinity',
'unparsed': -4,
'parsed': math.inf,
}, {
'name': 'negative Infinity',
'unparsed': -5,
'parsed': -math.inf,
}, {
'name': 'negative zero',
'unparsed': -6,
'parsed': -0.0,
}, {
'name': 'RegExp',
'unparsed': [['RegExp', 'regexp', 'gim']],  # XXX: flags are ignored
'parsed': re.compile('regexp'),
}, {
'name': 'Date',
'unparsed': [['Date', '2001-09-09T01:46:40.000Z']],
'parsed': dt.datetime.fromtimestamp(1e9, tz=dt.timezone.utc),
}, {
'name': 'Array',
'unparsed': [[1, 2, 3], 'a', 'b', 'c'],
'parsed': ['a', 'b', 'c'],
}, {
'name': 'Array (empty)',
'unparsed': [[]],
'parsed': [],
}, {
'name': 'Array (sparse)',
'unparsed': [[-2, 1, -2], 'b'],
'parsed': [None, 'b', None],
}, {
'name': 'Object',
'unparsed': [{'foo': 1, 'x-y': 2}, 'bar', 'z'],
'parsed': {'foo': 'bar', 'x-y': 'z'},
}, {
'name': 'Set',
'unparsed': [['Set', 1, 2, 3], 1, 2, 3],
'parsed': [1, 2, 3],
}, {
'name': 'Map',
'unparsed': [['Map', 1, 2], 'a', 'b'],
'parsed': [['a', 'b']],
}, {
'name': 'BigInt',
'unparsed': [['BigInt', '1']],
'parsed': 1,
}, {
'name': 'Uint8Array',
'unparsed': [['Uint8Array', 'AQID']],
'parsed': [1, 2, 3],
}, {
'name': 'ArrayBuffer',
'unparsed': [['ArrayBuffer', 'AQID']],
'parsed': [1, 2, 3],
}, {
'name': 'str (repetition)',
'unparsed': [[1, 1], 'a string'],
'parsed': ['a string', 'a string'],
}, {
'name': 'None (repetition)',
'unparsed': [[1, 1], None],
'parsed': [None, None],
}, {
'name': 'dict (repetition)',
'unparsed': [[1, 1], {}],
'parsed': [{}, {}],
}, {
'name': 'Object without prototype',
'unparsed': [['null']],
'parsed': {},
}, {
'name': 'cross-realm POJO',
'unparsed': [{}],
'parsed': {},
}]

TEST_CASES_IS = [{
'name': 'bool',
'unparsed': [True],
'parsed': True,
}, {
'name': 'Boolean',
'unparsed': [['Object', False]],
'parsed': False,
}, {
'name': 'undefined',
'unparsed': -1,
'parsed': None,
}, {
'name': 'null',
'unparsed': [None],
'parsed': None,
}, {
'name': 'NaN',
'unparsed': -3,
'parsed': math.nan,
}]

TEST_CASES_INVALID = [{
'name': 'empty string',
'unparsed': '',
'error': ValueError,
'pattern': r'expected int or list as input',
}, {
'name': 'hole',
'unparsed': -2,
'error': ValueError,
'pattern': r'invalid integer input',
}, {
'name': 'string',
'unparsed': 'hello',
'error': ValueError,
'pattern': r'expected int or list as input',
}, {
'name': 'number',
'unparsed': 42,
'error': ValueError,
'pattern': r'invalid integer input',
}, {
'name': 'boolean',
'unparsed': True,
'error': ValueError,
'pattern': r'expected int or list as input',
}, {
'name': 'null',
'unparsed': None,
'error': ValueError,
'pattern': r'expected int or list as input',
}, {
'name': 'object',
'unparsed': {},
'error': ValueError,
'pattern': r'expected int or list as input',
}, {
'name': 'empty array',
'unparsed': [],
'error': ValueError,
'pattern': r'expected a non-empty list as input',
}, {
'name': 'Python negative indexing',
'unparsed': [[1, 2, 3, 4, 5, 6, 7, -7], 1, 2, 3, 4, 5, 6, 7],
'error': IndexError,
'pattern': r'invalid index: -7',
}]
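
For orientation, inferred from the cases above rather than from a separate spec: devalue's wire format is a flat list in which element 0 describes the root, non-negative integers act as indices into that same list, and negative integers encode special values (-1 undefined, -3 NaN, -4/-5 positive/negative Infinity, -6 negative zero, with -2 marking array holes). Two of the cases above, replayed directly:

from yt_dlp.utils.jslib import devalue

# The root object maps each key to an index; indices 1 and 2 hold the strings
assert devalue.parse([{'foo': 1, 'x-y': 2}, 'bar', 'z']) == {'foo': 'bar', 'x-y': 'z'}
# Repetition: the same index appearing twice resolves to the same stored value
assert devalue.parse([[1, 1], 'a string']) == ['a string', 'a string']
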

class TestDevalue(unittest.TestCase):
def test_devalue_parse_equals(self):
for tc in TEST_CASES_EQUALS:
self.assertEqual(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])

def test_devalue_parse_is(self):
for tc in TEST_CASES_IS:
self.assertIs(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])

def test_devalue_parse_invalid(self):
for tc in TEST_CASES_INVALID:
with self.assertRaisesRegex(tc['error'], tc['pattern'], msg=tc['name']):
devalue.parse(tc['unparsed'])

def test_devalue_parse_cyclical(self):
name = 'Map (cyclical)'
result = devalue.parse([['Map', 1, 0], 'self'])
self.assertEqual(result[0][0], 'self', name)
self.assertIs(result, result[0][1], name)

name = 'Set (cyclical)'
result = devalue.parse([['Set', 0, 1], 42])
self.assertEqual(result[1], 42, name)
self.assertIs(result, result[0], name)

result = devalue.parse([[0]])
self.assertIs(result, result[0], 'Array (cyclical)')

name = 'Object (cyclical)'
result = devalue.parse([{'self': 0}])
self.assertIs(result, result['self'], name)

name = 'Object with null prototype (cyclical)'
result = devalue.parse([['null', 'self', 0]])
self.assertIs(result, result['self'], name)

name = 'Objects (cyclical)'
result = devalue.parse([[1, 2], {'second': 2}, {'first': 1}])
self.assertIs(result[0], result[1]['first'], name)
self.assertIs(result[1], result[0]['second'], name)
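
These identity assertions only pass if the parser allocates each container first and memoizes it by index before resolving children, so a back-reference to index 0 lands on the object still under construction. A minimal sketch of that idea for list nodes only (an illustration of the technique, not yt-dlp's implementation):

def parse_cyclic(values, index=0, memo=None):
    # Simplified: list nodes hold child indices; anything else is a leaf
    memo = {} if memo is None else memo
    if index in memo:
        return memo[index]
    node = values[index]
    if not isinstance(node, list):
        return node
    result = memo[index] = []  # register BEFORE recursing...
    result.extend(parse_cyclic(values, child, memo) for child in node)
    return result              # ...so cycles resolve to the same object

result = parse_cyclic([[0]])
assert result is result[0]  # the 'Array (cyclical)' case above
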

def test_devalue_parse_revivers(self):
self.assertEqual(
devalue.parse([['indirect', 1], {'a': 2}, 'b'], revivers={'indirect': lambda x: x}),
{'a': 'b'}, 'revivers (indirect)')

self.assertEqual(
devalue.parse([['parse', 1], '{"a":0}'], revivers={'parse': lambda x: json.loads(x)}),
{'a': 0}, 'revivers (parse)')


if __name__ == '__main__':
unittest.main()
@@ -14,13 +14,13 @@

from test.helper import (
assertGreaterEqual,
assertLessEqual,
expect_info_dict,
expect_warnings,
get_params,
gettestcases,
getwebpagetestcases,
is_download_test,
report_warning,
try_rm,
)

@@ -66,10 +66,6 @@ def _file_md5(fn):

@is_download_test
class TestDownload(unittest.TestCase):
# Parallel testing in nosetests. See
# http://nose.readthedocs.org/en/latest/doc_tests/test_multiprocess/multiprocess.html
_multiprocess_shared_ = True

maxDiff = None

COMPLETED_TESTS = {}
@@ -94,7 +90,7 @@ def test_template(self):
'playlist', [] if is_playlist else [test_case])

def print_skipping(reason):
print('Skipping %s: %s' % (test_case['name'], reason))
print('Skipping {}: {}'.format(test_case['name'], reason))
self.skipTest(reason)

if not ie.working():
@@ -117,15 +113,18 @@ def print_skipping(reason):

for other_ie in other_ies:
if not other_ie.working():
print_skipping('test depends on %sIE, marked as not WORKING' % other_ie.ie_key())
print_skipping(f'test depends on {other_ie.ie_key()}IE, marked as not WORKING')

params = get_params(test_case.get('params', {}))
params['outtmpl'] = tname + '_' + params['outtmpl']
if is_playlist and 'playlist' not in test_case:
params.setdefault('extract_flat', 'in_playlist')
params.setdefault('playlistend', test_case.get(
'playlist_mincount', test_case.get('playlist_count', -2) + 1))
params.setdefault('playlistend', max(
test_case.get('playlist_mincount', -1),
test_case.get('playlist_count', -2) + 1,
test_case.get('playlist_maxcount', -2) + 1))
params.setdefault('skip_download', True)
if 'playlist_duration_sum' not in test_case:
params.setdefault('extract_flat', 'in_playlist')

ydl = YoutubeDL(params, auto_init=False)
ydl.add_default_info_extractors()
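
A note on the new max(...) default above: playlistend caps how many entries get extracted, so it must cover playlist_mincount and reach one past playlist_count/playlist_maxcount, otherwise an overrun would be truncated away instead of showing up as "got more". The -1/-2 fallbacks simply keep absent keys from winning the max() (a reading of the hunk, worked through on a hypothetical case):

test_case = {'playlist_count': 5}  # hypothetical test case
playlistend = max(
    test_case.get('playlist_mincount', -1),      # absent -> -1
    test_case.get('playlist_count', -2) + 1,     # 5 + 1 = 6
    test_case.get('playlist_maxcount', -2) + 1)  # absent -> -1
assert playlistend == 6  # one extra entry, so "more than 5" is detectable
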
@@ -148,10 +147,7 @@ def match_exception(err):
return False
if err.__class__.__name__ == expected_exception:
return True
for exc in err.exc_info:
if exc.__class__.__name__ == expected_exception:
return True
return False
return any(exc.__class__.__name__ == expected_exception for exc in err.exc_info)

def try_rm_tcs_files(tcs=None):
if tcs is None:
@@ -163,6 +159,7 @@ def try_rm_tcs_files(tcs=None):
try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
try_rm_tcs_files()
try:
test_url = test_case['url']
try_num = 1
while True:
try:
@@ -170,7 +167,7 @@ def try_rm_tcs_files(tcs=None):
# for outside error handling, and returns the exit code
# instead of the result dict.
res_dict = ydl.extract_info(
test_case['url'],
test_url,
force_generic_extractor=params.get('force_generic_extractor', False))
except (DownloadError, ExtractorError) as err:
# Check if the exception is not a network related one
@@ -181,8 +178,7 @@ def try_rm_tcs_files(tcs=None):
raise

if try_num == RETRIES:
report_warning('%s failed due to network errors, skipping...' % tname)
return
raise

print(f'Retrying: {try_num} failed tries\n\n##########\n\n')

@@ -199,23 +195,23 @@ def try_rm_tcs_files(tcs=None):
self.assertTrue('entries' in res_dict)
expect_info_dict(self, res_dict, test_case.get('info_dict', {}))

num_entries = len(res_dict.get('entries', []))
if 'playlist_mincount' in test_case:
mincount = test_case['playlist_mincount']
assertGreaterEqual(
self,
len(res_dict['entries']),
test_case['playlist_mincount'],
'Expected at least %d in playlist %s, but got only %d' % (
test_case['playlist_mincount'], test_case['url'],
len(res_dict['entries'])))
self, num_entries, mincount,
f'Expected at least {mincount} entries in playlist {test_url}, but got only {num_entries}')
if 'playlist_count' in test_case:
count = test_case['playlist_count']
got = num_entries if num_entries <= count else 'more'
self.assertEqual(
len(res_dict['entries']),
test_case['playlist_count'],
'Expected %d entries in playlist %s, but got %d.' % (
test_case['playlist_count'],
test_case['url'],
len(res_dict['entries']),
))
num_entries, count,
f'Expected exactly {count} entries in playlist {test_url}, but got {got}')
if 'playlist_maxcount' in test_case:
maxcount = test_case['playlist_maxcount']
assertLessEqual(
self, num_entries, maxcount,
f'Expected at most {maxcount} entries in playlist {test_url}, but got more')
if 'playlist_duration_sum' in test_case:
got_duration = sum(e['duration'] for e in res_dict['entries'])
self.assertEqual(
@@ -244,9 +240,8 @@ def try_rm_tcs_files(tcs=None):
got_fsize = os.path.getsize(tc_filename)
assertGreaterEqual(
self, got_fsize, expected_minsize,
'Expected %s to be at least %s, but it\'s only %s ' %
(tc_filename, format_bytes(expected_minsize),
format_bytes(got_fsize)))
f'Expected {tc_filename} to be at least {format_bytes(expected_minsize)}, '
f'but it\'s only {format_bytes(got_fsize)} ')
if 'md5' in tc:
md5_for_file = _file_md5(tc_filename)
self.assertEqual(tc['md5'], md5_for_file)
@@ -255,7 +250,7 @@ def try_rm_tcs_files(tcs=None):
info_json_fn = os.path.splitext(tc_filename)[0] + '.info.json'
self.assertTrue(
os.path.exists(info_json_fn),
'Missing info file %s' % info_json_fn)
f'Missing info file {info_json_fn}')
with open(info_json_fn, encoding='utf-8') as infof:
info_dict = json.load(infof)
expect_info_dict(self, info_dict, tc.get('info_dict', {}))

@@ -15,7 +15,6 @@
from test.helper import http_server_port, try_rm
from yt_dlp import YoutubeDL
from yt_dlp.downloader.http import HttpFD
from yt_dlp.utils import encodeFilename
from yt_dlp.utils._utils import _YDLLogger as FakeLogger

TEST_DIR = os.path.dirname(os.path.abspath(__file__))

@@ -38,9 +37,9 @@ def send_content_range(self, total=None):
end = int(mobj.group(2))
valid_range = start is not None and end is not None
if valid_range:
content_range = 'bytes %d-%d' % (start, end)
content_range = f'bytes {start}-{end}'
if total:
content_range += '/%d' % total
content_range += f'/{total}'
self.send_header('Content-Range', content_range)
return (end - start + 1) if valid_range else total

@@ -82,12 +81,12 @@ def download(self, params, ep):
ydl = YoutubeDL(params)
downloader = HttpFD(ydl, params)
filename = 'testfile.mp4'
try_rm(encodeFilename(filename))
try_rm(filename)
self.assertTrue(downloader.real_download(filename, {
'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
'url': f'http://127.0.0.1:{self.port}/{ep}',
}), ep)
self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
try_rm(encodeFilename(filename))
self.assertEqual(os.path.getsize(filename), TEST_SIZE, ep)
try_rm(filename)

def download_all(self, params):
for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):

@@ -105,7 +105,7 @@ def __init__(self, socket, ssl_context, server_hostname=None, suppress_ragged_eo
self.incoming,
self.outgoing,
server_hostname=server_hostname,
server_side=server_side
server_side=server_side,
)
self._ssl_io_loop(self.sslobj.do_handshake)

@@ -247,6 +247,7 @@ def ctx(request):

@pytest.mark.parametrize(
'handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
@pytest.mark.parametrize('ctx', ['http'], indirect=True)  # pure http proxy can only support http
class TestHTTPProxy:
def test_http_no_auth(self, handler, ctx):
@@ -315,6 +316,7 @@ def test_http_with_idn(self, handler, ctx):
('Requests', 'https'),
('CurlCFFI', 'https'),
], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestHTTPConnectProxy:
def test_http_connect_no_auth(self, handler, ctx):
with ctx.http_server(HTTPConnectProxyHandler) as server_address:
@@ -331,10 +333,6 @@ def test_http_connect_auth(self, handler, ctx):
assert proxy_info['proxy'] == server_address
assert 'Proxy-Authorization' in proxy_info['headers']

@pytest.mark.skip_handler(
'Requests',
'bug in urllib3 causes unclosed socket: https://github.com/urllib3/urllib3/issues/3374'
)
def test_http_connect_bad_auth(self, handler, ctx):
with ctx.http_server(HTTPConnectProxyHandler, username='test', password='test') as server_address:
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://test:bad@{server_address}'}) as rh:

@@ -29,11 +29,11 @@ def error(self, msg):
@is_download_test
class TestIqiyiSDKInterpreter(unittest.TestCase):
def test_iqiyi_sdk_interpreter(self):
'''
"""
Test the functionality of IqiyiSDKInterpreter by trying to log in

If `sign` is incorrect, /validate call throws an HTTP 556 error
'''
"""
logger = WarningLogger()
ie = IqiyiIE(FakeYDL({'logger': logger}))
ie._perform_login('foo', 'bar')

test/test_jsc/conftest.py (new file, 60 lines)
@@ -0,0 +1,60 @@
import re
import pathlib

import pytest

import yt_dlp.globals
from yt_dlp import YoutubeDL
from yt_dlp.extractor.common import InfoExtractor


_TESTDATA_PATH = pathlib.Path(__file__).parent.parent / 'testdata/sigs'
_player_re = re.compile(r'^.+/player/(?P<id>[a-zA-Z0-9_/.-]+)\.js$')
_player_id_trans = str.maketrans(dict.fromkeys('/.-', '_'))


@pytest.fixture
def ie() -> InfoExtractor:
runtime_names = yt_dlp.globals.supported_js_runtimes.value
ydl = YoutubeDL({'js_runtimes': {key: {} for key in runtime_names}})
ie = ydl.get_info_extractor('Youtube')

def _load_player(video_id, player_url, fatal=True):
match = _player_re.match(player_url)
test_id = match.group('id').translate(_player_id_trans)
cached_file = _TESTDATA_PATH / f'player-{test_id}.js'

if cached_file.exists():
return cached_file.read_text()

if code := ie._download_webpage(player_url, video_id, fatal=fatal):
_TESTDATA_PATH.mkdir(exist_ok=True, parents=True)
cached_file.write_text(code)
return code

return None

ie._load_player = _load_player
return ie


class MockLogger:
def trace(self, message: str):
print(f'trace: {message}')

def debug(self, message: str, *, once=False):
print(f'debug: {message}')

def info(self, message: str):
print(f'info: {message}')

def warning(self, message: str, *, once=False):
print(f'warning: {message}')

def error(self, message: str):
print(f'error: {message}')


@pytest.fixture
def logger():
return MockLogger()
test/test_jsc/test_ejs_integration.py (new file, 128 lines)
@@ -0,0 +1,128 @@
from __future__ import annotations

import dataclasses
import enum
import importlib.util
import json

import pytest

from yt_dlp.extractor.youtube.jsc.provider import (
JsChallengeRequest,
JsChallengeType,
JsChallengeProviderResponse,
JsChallengeResponse,
NChallengeInput,
NChallengeOutput,
SigChallengeInput,
SigChallengeOutput,
)
from yt_dlp.extractor.youtube.jsc._builtin.bun import BunJCP
from yt_dlp.extractor.youtube.jsc._builtin.deno import DenoJCP
from yt_dlp.extractor.youtube.jsc._builtin.node import NodeJCP
from yt_dlp.extractor.youtube.jsc._builtin.quickjs import QuickJSJCP


_has_ejs = bool(importlib.util.find_spec('yt_dlp_ejs'))
pytestmark = pytest.mark.skipif(not _has_ejs, reason='yt-dlp-ejs not available')


class Variant(enum.Enum):
main = 'player_ias.vflset/en_US/base.js'
tcc = 'player_ias_tcc.vflset/en_US/base.js'
tce = 'player_ias_tce.vflset/en_US/base.js'
es5 = 'player_es5.vflset/en_US/base.js'
es6 = 'player_es6.vflset/en_US/base.js'
tv = 'tv-player-ias.vflset/tv-player-ias.js'
tv_es6 = 'tv-player-es6.vflset/tv-player-es6.js'
phone = 'player-plasma-ias-phone-en_US.vflset/base.js'
tablet = 'player-plasma-ias-tablet-en_US.vflset/base.js'


@dataclasses.dataclass
class Challenge:
player: str
variant: Variant
type: JsChallengeType
values: dict[str, str] = dataclasses.field(default_factory=dict)

def url(self, /):
return f'https://www.youtube.com/s/player/{self.player}/{self.variant.value}'


CHALLENGES: list[Challenge] = [
Challenge('3d3ba064', Variant.tce, JsChallengeType.N, {
'ZdZIqFPQK-Ty8wId': 'qmtUsIz04xxiNW',
'4GMrWHyKI5cEvhDO': 'N9gmEX7YhKTSmw',
}),
Challenge('3d3ba064', Variant.tce, JsChallengeType.SIG, {
'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3gqEctUw-NYdNmOEvaepit0zJAtIEsgOV2SXZjhSHMNy0NXNG_1kNyBf6HPuAuCduh-a7O',
}),
Challenge('5ec65609', Variant.tce, JsChallengeType.N, {
'0eRGgQWJGfT5rFHFj': '4SvMpDQH-vBJCw',
}),
Challenge('5ec65609', Variant.tce, JsChallengeType.SIG, {
'AAJAJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grH0rTMICA1mmDc0HoXgW3CAiAQQ4=CspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ=I':
'AJfQdSswRQIhAMG5SN7-cAFChdrE7tLA6grI0rTMICA1mmDc0HoXgW3CAiAQQ4HCspfaF_vt82XH5yewvqcuEkvzeTsbRuHssRMyJQ==',
}),
Challenge('6742b2b9', Variant.tce, JsChallengeType.N, {
'_HPB-7GFg1VTkn9u': 'qUAsPryAO_ByYg',
'K1t_fcB6phzuq2SF': 'Y7PcOt3VE62mog',
}),
Challenge('6742b2b9', Variant.tce, JsChallengeType.SIG, {
'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJAA':
'AJfQdSswRAIgMVVvrovTbw6UNh99kPa4D_XQjGT4qYu7S6SHM8EjoCACIEQnz-nKN5RgG6iUTnNJC58csYPSrnS_SzricuUMJZGM',
}),
Challenge('2b83d2e0', Variant.main, JsChallengeType.N, {
'0eRGgQWJGfT5rFHFj': 'euHbygrCMLksxd',
}),
Challenge('2b83d2e0', Variant.main, JsChallengeType.SIG, {
'MMGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKn-znQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJA':
'-MGZJMUucirzS_SnrSPYsc85CJNnTUi6GgR5NKnMznQEICACojE8MHS6S7uYq4TGjQX_D4aPk99hNU6wbTvorvVVMgIARwsSdQfJ',
}),
Challenge('638ec5c6', Variant.main, JsChallengeType.N, {
'ZdZIqFPQK-Ty8wId': '1qov8-KM-yH',
}),
Challenge('638ec5c6', Variant.main, JsChallengeType.SIG, {
'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt':
'MhudCuAuP-6fByOk1_GNXN7gNHHShjyXS2VOgsEItAJz0tipeav0OmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
}),
]

requests: list[JsChallengeRequest] = []
responses: list[JsChallengeProviderResponse] = []
for test in CHALLENGES:
input_type, output_type = {
JsChallengeType.N: (NChallengeInput, NChallengeOutput),
JsChallengeType.SIG: (SigChallengeInput, SigChallengeOutput),
}[test.type]

request = JsChallengeRequest(test.type, input_type(test.url(), list(test.values.keys())), test.player)
requests.append(request)
responses.append(JsChallengeProviderResponse(request, JsChallengeResponse(test.type, output_type(test.values))))


@pytest.fixture(params=[BunJCP, DenoJCP, NodeJCP, QuickJSJCP])
def jcp(request, ie, logger):
obj = request.param(ie, logger, None)
if not obj.is_available():
pytest.skip(f'{obj.PROVIDER_NAME} is not available')
obj.is_dev = True
return obj


@pytest.mark.download
def test_bulk_requests(jcp):
assert list(jcp.bulk_solve(requests)) == responses


@pytest.mark.download
def test_using_cached_player(jcp):
first_player_requests = requests[:3]
player = jcp._get_player(first_player_requests[0].video_id, first_player_requests[0].input.player_url)
initial = json.loads(jcp._run_js_runtime(jcp._construct_stdin(player, False, first_player_requests)))
preprocessed = initial.pop('preprocessed_player')
result = json.loads(jcp._run_js_runtime(jcp._construct_stdin(preprocessed, True, first_player_requests)))

assert initial == result
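
In other words, the test pins down the property that makes caching safe: solving challenges from the preprocessed player emitted by the first run must produce exactly the same results as solving from the raw player source (as read from the assertions here; where and how long the preprocessed player is cached is managed elsewhere).
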

test/test_jsc/test_provider.py (new file, 194 lines)
@@ -0,0 +1,194 @@

import pytest

from yt_dlp.extractor.youtube.jsc.provider import (
JsChallengeProvider,
JsChallengeRequest,
JsChallengeProviderResponse,
JsChallengeProviderRejectedRequest,
JsChallengeType,
JsChallengeResponse,
NChallengeOutput,
NChallengeInput,
JsChallengeProviderError,
register_provider,
register_preference,
)
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
from yt_dlp.utils import ExtractorError
from yt_dlp.extractor.youtube.jsc._registry import _jsc_preferences, _jsc_providers


class ExampleJCP(JsChallengeProvider):
PROVIDER_NAME = 'example-provider'
PROVIDER_VERSION = '0.0.1'
BUG_REPORT_LOCATION = 'https://example.com/issues'

_SUPPORTED_TYPES = [JsChallengeType.N]

def is_available(self) -> bool:
return True

def _real_bulk_solve(self, requests):
for request in requests:
results = dict.fromkeys(request.input.challenges, 'example-solution')
response = JsChallengeResponse(
type=request.type,
output=NChallengeOutput(results=results))
yield JsChallengeProviderResponse(request=request, response=response)


PLAYER_URL = 'https://example.com/player.js'


class TestJsChallengeProvider:
# note: some test covered in TestPoTokenProvider which shares the same base class
def test_base_type(self):
assert issubclass(JsChallengeProvider, IEContentProvider)

def test_create_provider_missing_bulk_solve_method(self, ie, logger):
class MissingMethodsJCP(JsChallengeProvider):
def is_available(self) -> bool:
return True

with pytest.raises(TypeError, match='bulk_solve'):
MissingMethodsJCP(ie=ie, logger=logger, settings={})

def test_create_provider_missing_available_method(self, ie, logger):
class MissingMethodsJCP(JsChallengeProvider):
def _real_bulk_solve(self, requests):
raise JsChallengeProviderRejectedRequest('Not implemented')

with pytest.raises(TypeError, match='is_available'):
MissingMethodsJCP(ie=ie, logger=logger, settings={})

def test_barebones_provider(self, ie, logger):
class BarebonesProviderJCP(JsChallengeProvider):
def is_available(self) -> bool:
return True

def _real_bulk_solve(self, requests):
raise JsChallengeProviderRejectedRequest('Not implemented')

provider = BarebonesProviderJCP(ie=ie, logger=logger, settings={})
assert provider.PROVIDER_NAME == 'BarebonesProvider'
assert provider.PROVIDER_KEY == 'BarebonesProvider'
assert provider.PROVIDER_VERSION == '0.0.0'
assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

def test_example_provider_success(self, ie, logger):
provider = ExampleJCP(ie=ie, logger=logger, settings={})

request = JsChallengeRequest(
type=JsChallengeType.N,
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))

request_two = JsChallengeRequest(
type=JsChallengeType.N,
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge-2']))

responses = list(provider.bulk_solve([request, request_two]))
assert len(responses) == 2
assert all(isinstance(r, JsChallengeProviderResponse) for r in responses)
assert responses == [
JsChallengeProviderResponse(
request=request,
response=JsChallengeResponse(
type=JsChallengeType.N,
output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
),
),
JsChallengeProviderResponse(
request=request_two,
response=JsChallengeResponse(
type=JsChallengeType.N,
output=NChallengeOutput(results={'example-challenge-2': 'example-solution'}),
),
),
]

def test_provider_unsupported_challenge_type(self, ie, logger):
provider = ExampleJCP(ie=ie, logger=logger, settings={})
request_supported = JsChallengeRequest(
type=JsChallengeType.N,
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
request_unsupported = JsChallengeRequest(
type=JsChallengeType.SIG,
input=NChallengeInput(player_url=PLAYER_URL, challenges=['example-challenge']))
responses = list(provider.bulk_solve([request_supported, request_unsupported, request_supported]))
assert len(responses) == 3
# Requests are validated first before continuing to _real_bulk_solve
assert isinstance(responses[0], JsChallengeProviderResponse)
assert isinstance(responses[0].error, JsChallengeProviderRejectedRequest)
assert responses[0].request is request_unsupported
assert str(responses[0].error) == 'JS Challenge type "JsChallengeType.SIG" is not supported by example-provider'

assert responses[1:] == [
JsChallengeProviderResponse(
request=request_supported,
response=JsChallengeResponse(
type=JsChallengeType.N,
output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
),
),
JsChallengeProviderResponse(
request=request_supported,
response=JsChallengeResponse(
type=JsChallengeType.N,
output=NChallengeOutput(results={'example-challenge': 'example-solution'}),
),
),
]

def test_provider_get_player(self, ie, logger):
ie._load_player = lambda video_id, player_url, fatal: (video_id, player_url, fatal)
provider = ExampleJCP(ie=ie, logger=logger, settings={})
assert provider._get_player('video123', PLAYER_URL) == ('video123', PLAYER_URL, True)

def test_provider_get_player_error(self, ie, logger):
def raise_error(video_id, player_url, fatal):
raise ExtractorError('Failed to load player')

ie._load_player = raise_error
provider = ExampleJCP(ie=ie, logger=logger, settings={})
with pytest.raises(JsChallengeProviderError, match='Failed to load player for JS challenge'):
provider._get_player('video123', PLAYER_URL)

def test_require_class_end_with_suffix(self, ie, logger):
class InvalidSuffix(JsChallengeProvider):
PROVIDER_NAME = 'invalid-suffix'

def _real_bulk_solve(self, requests):
raise JsChallengeProviderRejectedRequest('Not implemented')

def is_available(self) -> bool:
return True

provider = InvalidSuffix(ie=ie, logger=logger, settings={})

with pytest.raises(AssertionError):
provider.PROVIDER_KEY  # noqa: B018


def test_register_provider(ie):

@register_provider
class UnavailableProviderJCP(JsChallengeProvider):
def is_available(self) -> bool:
return False

def _real_bulk_solve(self, requests):
raise JsChallengeProviderRejectedRequest('Not implemented')

assert _jsc_providers.value.get('UnavailableProvider') == UnavailableProviderJCP
_jsc_providers.value.pop('UnavailableProvider')


def test_register_preference(ie):
before = len(_jsc_preferences.value)

@register_preference(ExampleJCP)
def unavailable_preference(*args, **kwargs):
return 1

assert len(_jsc_preferences.value) == before + 1
@@ -9,7 +9,7 @@

import math

from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
from yt_dlp.jsinterp import JS_Undefined, JSInterpreter, js_number_to_string


class NaN:
@@ -92,6 +92,17 @@ def test_operators(self):
self._test('function f(){return 0 && 1 || 2;}', 2)
self._test('function f(){return 0 ?? 42;}', 0)
self._test('function f(){return "life, the universe and everything" < 42;}', False)
self._test('function f(){return 0 - 7 * - 6;}', 42)
self._test('function f(){return true << "5";}', 32)
self._test('function f(){return true << true;}', 2)
self._test('function f(){return "19" & "21.9";}', 17)
self._test('function f(){return "19" & false;}', 0)
self._test('function f(){return "11.0" >> "2.1";}', 2)
self._test('function f(){return 5 ^ 9;}', 12)
self._test('function f(){return 0.0 << NaN}', 0)
self._test('function f(){return null << undefined}', 0)
# TODO: Does not work due to number too large
# self._test('function f(){return 21 << 4294967297}', 42)
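
The expected values in the new bitwise cases follow ECMAScript's ToInt32 coercion: operands are converted to Number, truncated toward zero, and wrapped into the signed 32-bit range, with NaN, null and undefined collapsing to 0 (and shift counts masked to 5 bits). A Python sketch of that coercion, written to match the tests above rather than copied from jsinterp:

import math

def to_int32(value):
    # Number(...) semantics, simplified: bools are 0/1, strings parse as
    # floats, None/NaN/unparseable map to 0
    if value is None:
        return 0
    try:
        num = float(value)
    except ValueError:
        return 0
    if math.isnan(num) or math.isinf(num):
        return 0
    num = int(num) & 0xFFFFFFFF  # truncate toward zero, wrap to 32 bits
    return num - 0x100000000 if num >= 0x80000000 else num

assert to_int32('19') & to_int32('21.9') == 17       # "19" & "21.9"
assert to_int32(True) << (to_int32('5') & 31) == 32  # true << "5"
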

def test_array_access(self):
self._test('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}', [5, 2, 7])
@@ -107,6 +118,7 @@ def test_assignments(self):
self._test('function f(){var x = 20; x = 30 + 1; return x;}', 31)
self._test('function f(){var x = 20; x += 30 + 1; return x;}', 51)
self._test('function f(){var x = 20; x -= 30 + 1; return x;}', -11)
self._test('function f(){var x = 2; var y = ["a", "b"]; y[x%y["length"]]="z"; return y}', ['z', 'b'])

@unittest.skip('Not implemented')
def test_comments(self):
@@ -373,7 +385,161 @@ def test_negative(self):
@unittest.skip('Not implemented')
def test_packed(self):
jsi = JSInterpreter('''function f(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}''')
self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|')))
self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|'))) # noqa: SIM905

def test_join(self):
test_input = list('test')
tests = [
'function f(a, b){return a.join(b)}',
'function f(a, b){return Array.prototype.join.call(a, b)}',
'function f(a, b){return Array.prototype.join.apply(a, [b])}',
]
for test in tests:
jsi = JSInterpreter(test)
self._test(jsi, 'test', args=[test_input, ''])
self._test(jsi, 't-e-s-t', args=[test_input, '-'])
self._test(jsi, '', args=[[], '-'])

def test_split(self):
test_result = list('test')
tests = [
'function f(a, b){return a.split(b)}',
'function f(a, b){return a["split"](b)}',
'function f(a, b){let x = ["split"]; return a[x[0]](b)}',
'function f(a, b){return String.prototype.split.call(a, b)}',
'function f(a, b){return String.prototype.split.apply(a, [b])}',
]
for test in tests:
jsi = JSInterpreter(test)
self._test(jsi, test_result, args=['test', ''])
self._test(jsi, test_result, args=['t-e-s-t', '-'])
self._test(jsi, [''], args=['', '-'])
self._test(jsi, [], args=['', ''])
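# The two empty-string cases pin down a JavaScript quirk the interpreter
# must reproduce: ''.split('-') yields [''] (one empty field) while
# ''.split('') yields [] -- unlike Python, where ''.split('') raises
# ValueError.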

def test_slice(self):
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice()}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(5)}', [5, 6, 7, 8])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(99)}', [])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-2)}', [7, 8])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-99)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 0)}', [])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, 0)}', [])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 1)}', [0])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(3, 6)}', [3, 4, 5])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, -1)}', [1, 2, 3, 4, 5, 6, 7])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-1, 1)}', [])
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-3, -1)}', [6, 7])
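# As in JavaScript, negative slice arguments count back from the end:
# for this 9-element array, slice(-3, -1) is equivalent to slice(6, 8)
# and keeps the elements at indices 6 and 7.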
self._test('function f(){return "012345678".slice()}', '012345678')
self._test('function f(){return "012345678".slice(0)}', '012345678')
self._test('function f(){return "012345678".slice(5)}', '5678')
self._test('function f(){return "012345678".slice(99)}', '')
self._test('function f(){return "012345678".slice(-2)}', '78')
self._test('function f(){return "012345678".slice(-99)}', '012345678')
self._test('function f(){return "012345678".slice(0, 0)}', '')
self._test('function f(){return "012345678".slice(1, 0)}', '')
self._test('function f(){return "012345678".slice(0, 1)}', '0')
self._test('function f(){return "012345678".slice(3, 6)}', '345')
self._test('function f(){return "012345678".slice(1, -1)}', '1234567')
self._test('function f(){return "012345678".slice(-1, 1)}', '')
self._test('function f(){return "012345678".slice(-3, -1)}', '67')

def test_splice(self):
self._test('function f(){var T = ["0", "1", "2"]; T["splice"](2, 1, "0")[0]; return T }', ['0', '1', '0'])
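# splice(2, 1, "0") removes one element at index 2 and inserts "0" in its
# place, mutating T in-place; the trailing [0] only indexes the returned
# list of removed elements, which the test discards before checking T.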

def test_js_number_to_string(self):
for test, radix, expected in [
(0, None, '0'),
(-0, None, '0'),
(0.0, None, '0'),
(-0.0, None, '0'),
(math.nan, None, 'NaN'),
(-math.nan, None, 'NaN'),
(math.inf, None, 'Infinity'),
(-math.inf, None, '-Infinity'),
(10 ** 21.5, 8, '526665530627250154000000'),
(6, 2, '110'),
(254, 16, 'fe'),
(-10, 2, '-1010'),
(-0xff, 2, '-11111111'),
(0.1 + 0.2, 16, '0.4cccccccccccd'),
(1234.1234, 10, '1234.1234'),
# (1000000000000000128, 10, '1000000000000000100')
]:
assert js_number_to_string(test, radix) == expected
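# The expected strings mirror JavaScript's Number.prototype.toString(radix).
# For the integer cases the conversion is plain repeated division; a minimal
# sketch with a hypothetical helper (not yt-dlp's implementation):
#
#   def to_radix(n, radix):
#       digits = '0123456789abcdefghijklmnopqrstuvwxyz'
#       sign, n = ('-', -n) if n < 0 else ('', n)
#       out = ''
#       while True:
#           out, n = digits[n % radix] + out, n // radix
#           if not n:
#               return sign + out
#
#   assert to_radix(254, 16) == 'fe'
#   assert to_radix(-255, 2) == '-11111111'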

def test_extract_function(self):
jsi = JSInterpreter('function a(b) { return b + 1; }')
func = jsi.extract_function('a')
self.assertEqual(func([2]), 3)

def test_extract_function_with_global_stack(self):
jsi = JSInterpreter('function c(d) { return d + e + f + g; }')
func = jsi.extract_function('c', {'e': 10}, {'f': 100, 'g': 1000})
self.assertEqual(func([1]), 1111)

def test_extract_object(self):
jsi = JSInterpreter('var a={};a.xy={};var xy;var zxy={};xy={z:function(){return "abc"}};')
self.assertTrue('z' in jsi.extract_object('xy', None))

def test_increment_decrement(self):
self._test('function f() { var x = 1; return ++x; }', 2)
self._test('function f() { var x = 1; return x++; }', 1)
self._test('function f() { var x = 1; x--; return x }', 0)
self._test('function f() { var y; var x = 1; x++, --x, x--, x--, y="z", "abc", x++; return --x }', -1)
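# Walkthrough of the comma expression above: x starts at 1; x++ -> 2,
# --x -> 1, x-- -> 0, x-- -> -1; y="z", "abc" and x++ (x back to 0) are
# evaluated for side effects only, and the final --x returns -1.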
self._test('function f() { var a = "test--"; return a; }', 'test--')
self._test('function f() { var b = 1; var a = "b--"; return a; }', 'b--')

def test_nested_function_scoping(self):
self._test(R'''
function f() {
var g = function() {
var P = 2;
return P;
};
var P = 1;
g();
return P;
}
''', 1)
self._test(R'''
function f() {
var x = function() {
for (var w = 1, M = []; w < 2; w++) switch (w) {
case 1:
M.push("a");
case 2:
M.push("b");
}
return M
};
var w = "c";
var M = "d";
var y = x();
y.push(w);
y.push(M);
return y;
}
''', ['a', 'b', 'c', 'd'])
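# 'case 1' has no break, so execution falls through and pushes both "a" and
# "b"; the var-scoped w and M inside x() shadow the outer variables, leaving
# w == "c" and M == "d" intact to be appended afterwards.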
self._test(R'''
function f() {
var P, Q;
var z = 100;
var g = function() {
var P, Q; P = 2; Q = 15;
z = 0;
return P+Q;
};
P = 1; Q = 10;
var x = g(), y = 3;
return P+Q+x+y+z;
}
''', 31)
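# Expected value: the inner var P, Q (2 and 15) shadow the outer bindings,
# so g() returns 17 while setting the shared z to 0; the final sum is
# P + Q + x + y + z == 1 + 10 + 17 + 3 + 0 == 31.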

def test_undefined_varnames(self):
jsi = JSInterpreter('function f(){ var a; return [a, b]; }')
self._test(jsi, [JS_Undefined, JS_Undefined])
self.assertEqual(jsi._undefined_varnames, {'b'})


if __name__ == '__main__':

@@ -21,7 +21,7 @@ def test_netrc_present(self):
continue
self.assertTrue(
ie._NETRC_MACHINE,
'Extractor %s supports login, but is missing a _NETRC_MACHINE property' % ie.IE_NAME)
f'Extractor {ie.IE_NAME} supports login, but is missing a _NETRC_MACHINE property')


if __name__ == '__main__':

@@ -3,6 +3,7 @@
# Allow direct execution
import os
import sys
from unittest.mock import MagicMock

import pytest

@@ -22,7 +23,6 @@
import tempfile
import threading
import time
import urllib.error
import urllib.request
import warnings
import zlib
@@ -39,6 +39,7 @@
from yt_dlp.dependencies import brotli, curl_cffi, requests, urllib3
from yt_dlp.networking import (
HEADRequest,
PATCHRequest,
PUTRequest,
Request,
RequestDirector,
@@ -222,10 +223,7 @@ def do_GET(self):
if encoding == 'br' and brotli:
payload = brotli.compress(payload)
elif encoding == 'gzip':
buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode='wb') as f:
f.write(payload)
payload = buf.getvalue()
payload = gzip.compress(payload, mtime=0)
elif encoding == 'deflate':
payload = zlib.compress(payload)
elif encoding == 'unsupported':
@@ -265,6 +263,11 @@ def do_GET(self):
self.end_headers()
self.wfile.write(payload)
self.finish()
elif self.path == '/get_cookie':
self.send_response(200)
self.send_header('Set-Cookie', 'test=ytdlp; path=/')
self.end_headers()
self.finish()
else:
self._status(404)

@@ -309,6 +312,7 @@ def setup_class(cls):


@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
class TestHTTPRequestHandler(TestRequestHandlerBase):

def test_verify_cert(self, handler):
@@ -338,6 +342,52 @@ def test_ssl_error(self, handler):
validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
assert not issubclass(exc_info.type, CertificateVerifyError)

@pytest.mark.skip_handler('CurlCFFI', 'legacy_ssl ignored by CurlCFFI')
def test_legacy_ssl_extension(self, handler):
# HTTPS server with old ciphers
# XXX: is there a better way to test this than to create a new server?
https_httpd = http.server.ThreadingHTTPServer(
('127.0.0.1', 0), HTTPTestRequestHandler)
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
sslctx.maximum_version = ssl.TLSVersion.TLSv1_2
sslctx.set_ciphers('SHA1:AESCCM:aDSS:eNULL:aNULL')
sslctx.load_cert_chain(os.path.join(TEST_DIR, 'testcert.pem'), None)
https_httpd.socket = sslctx.wrap_socket(https_httpd.socket, server_side=True)
https_port = http_server_port(https_httpd)
https_server_thread = threading.Thread(target=https_httpd.serve_forever)
https_server_thread.daemon = True
https_server_thread.start()

with handler(verify=False) as rh:
res = validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers', extensions={'legacy_ssl': True}))
assert res.status == 200
res.close()

# Ensure only applies to request extension
with pytest.raises(SSLError):
validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
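# The point of the final assertion: 'legacy_ssl' passed as a request
# extension relaxes the SSL context for that single request only, so a
# follow-up request without it must still fail against the weak-cipher
# server. The next test exercises the handler-wide legacy_ssl_support
# option, which applies to every request.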

@pytest.mark.skip_handler('CurlCFFI', 'legacy_ssl ignored by CurlCFFI')
def test_legacy_ssl_support(self, handler):
# HTTPS server with old ciphers
# XXX: is there a better way to test this than to create a new server?
https_httpd = http.server.ThreadingHTTPServer(
('127.0.0.1', 0), HTTPTestRequestHandler)
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
sslctx.maximum_version = ssl.TLSVersion.TLSv1_2
sslctx.set_ciphers('SHA1:AESCCM:aDSS:eNULL:aNULL')
sslctx.load_cert_chain(os.path.join(TEST_DIR, 'testcert.pem'), None)
https_httpd.socket = sslctx.wrap_socket(https_httpd.socket, server_side=True)
https_port = http_server_port(https_httpd)
https_server_thread = threading.Thread(target=https_httpd.serve_forever)
https_server_thread.daemon = True
https_server_thread.start()

with handler(verify=False, legacy_ssl_support=True) as rh:
res = validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
assert res.status == 200
res.close()

def test_percent_encode(self, handler):
with handler() as rh:
# Unicode characters should be encoded with uppercase percent-encoding
@@ -375,10 +425,10 @@ def test_raise_http_error(self, handler):
with handler() as rh:
for bad_status in (400, 500, 599, 302):
with pytest.raises(HTTPError):
validate_and_send(rh, Request('http://127.0.0.1:%d/gen_%d' % (self.http_port, bad_status)))
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_{bad_status}'))

# Should not raise an error
validate_and_send(rh, Request('http://127.0.0.1:%d/gen_200' % self.http_port)).close()
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200')).close()

def test_response_url(self, handler):
with handler() as rh:
@@ -472,7 +522,7 @@ def test_redirect_loop(self, handler):
def test_incompleteread(self, handler):
with handler(timeout=2) as rh:
with pytest.raises(IncompleteRead, match='13 bytes read, 234221 more expected'):
validate_and_send(rh, Request('http://127.0.0.1:%d/incompleteread' % self.http_port)).read()
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/incompleteread')).read()

def test_cookies(self, handler):
cookiejar = YoutubeDLCookieJar()
@@ -490,6 +540,24 @@ def test_cookies(self, handler):
rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': cookiejar})).read()
assert b'cookie: test=ytdlp' in data.lower()

def test_cookie_sync_only_cookiejar(self, handler):
# Ensure that cookies are ONLY being handled by the cookiejar
with handler() as rh:
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/get_cookie', extensions={'cookiejar': YoutubeDLCookieJar()}))
data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': YoutubeDLCookieJar()})).read()
assert b'cookie: test=ytdlp' not in data.lower()

def test_cookie_sync_delete_cookie(self, handler):
# Ensure that cookies are ONLY being handled by the cookiejar
cookiejar = YoutubeDLCookieJar()
with handler(cookiejar=cookiejar) as rh:
validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/get_cookie'))
data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
assert b'cookie: test=ytdlp' in data.lower()
cookiejar.clear_session_cookies()
data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
assert b'cookie: test=ytdlp' not in data.lower()

def test_headers(self, handler):

with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
@@ -545,12 +613,14 @@ def test_source_address(self, handler):
rh, Request(f'http://127.0.0.1:{self.http_port}/source_address')).read().decode()
assert source_address == data

# Not supported by CurlCFFI
@pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
def test_gzip_trailing_garbage(self, handler):
with handler() as rh:
data = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage')).read().decode()
res = validate_and_send(rh, Request(f'http://localhost:{self.http_port}/trailing_garbage'))
data = res.read().decode()
assert data == '<html><video src="/vid.mp4" /></html>'
# Should auto-close and mark the response adaptor as closed
assert res.closed

@pytest.mark.skip_handler('CurlCFFI', 'not applicable to curl-cffi')
@pytest.mark.skipif(not brotli, reason='brotli support is not installed')
@@ -562,6 +632,8 @@ def test_brotli(self, handler):
headers={'ytdl-encoding': 'br'}))
assert res.headers.get('Content-Encoding') == 'br'
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
# Should auto-close and mark the response adaptor as closed
assert res.closed

def test_deflate(self, handler):
with handler() as rh:
@@ -571,6 +643,8 @@ def test_deflate(self, handler):
headers={'ytdl-encoding': 'deflate'}))
assert res.headers.get('Content-Encoding') == 'deflate'
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
# Should auto-close and mark the response adaptor as closed
assert res.closed

def test_gzip(self, handler):
with handler() as rh:
@@ -580,6 +654,8 @@ def test_gzip(self, handler):
headers={'ytdl-encoding': 'gzip'}))
assert res.headers.get('Content-Encoding') == 'gzip'
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
# Should auto-close and mark the response adaptor as closed
assert res.closed

def test_multiple_encodings(self, handler):
with handler() as rh:
@@ -590,6 +666,8 @@ def test_multiple_encodings(self, handler):
headers={'ytdl-encoding': pair}))
assert res.headers.get('Content-Encoding') == pair
assert res.read() == b'<html><video src="/vid.mp4" /></html>'
# Should auto-close and mark the response adaptor as closed
assert res.closed

@pytest.mark.skip_handler('CurlCFFI', 'not supported by curl-cffi')
def test_unsupported_encoding(self, handler):
@@ -600,6 +678,8 @@ def test_unsupported_encoding(self, handler):
headers={'ytdl-encoding': 'unsupported', 'Accept-Encoding': '*'}))
assert res.headers.get('Content-Encoding') == 'unsupported'
assert res.read() == b'raw'
# Should auto-close and mark the response adaptor as closed
assert res.closed

def test_read(self, handler):
with handler() as rh:
@@ -607,9 +687,13 @@ def test_read(self, handler):
rh, Request(f'http://127.0.0.1:{self.http_port}/headers'))
assert res.readable()
assert res.read(1) == b'H'
# Ensure we don't close the adaptor yet
assert not res.closed
assert res.read(3) == b'ost'
assert res.read().decode().endswith('\n\n')
assert res.read() == b''
# Should auto-close and mark the response adaptor as closed
assert res.closed

def test_request_disable_proxy(self, handler):
for proxy_proto in handler._SUPPORTED_PROXY_SCHEMES or ['http']:
@@ -651,8 +735,29 @@ def test_allproxy(self, handler):
rh, Request(
f'http://127.0.0.1:{self.http_port}/headers', proxies={'all': 'http://10.255.255.255'})).close()

@pytest.mark.skip_handlers_if(lambda _, handler: handler not in ['Urllib', 'CurlCFFI'], 'handler does not support keep_header_casing')
def test_keep_header_casing(self, handler):
with handler() as rh:
res = validate_and_send(
rh, Request(
f'http://127.0.0.1:{self.http_port}/headers', headers={'X-test-heaDer': 'test'}, extensions={'keep_header_casing': True})).read().decode()

assert 'X-test-heaDer: test' in res
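# The 'keep_header_casing' extension asks the handler to send header names
# with the caller's exact casing rather than normalizing them, so the echo
# endpoint must return 'X-test-heaDer' byte-for-byte as it was sent.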

def test_partial_read_then_full_read(self, handler):
with handler() as rh:
for encoding in ('', 'gzip', 'deflate'):
res = validate_and_send(rh, Request(
f'http://127.0.0.1:{self.http_port}/content-encoding',
headers={'ytdl-encoding': encoding}))
assert res.headers.get('Content-Encoding') == encoding
assert res.read(6) == b'<html>'
assert res.read(0) == b''
assert res.read() == b'<video src="/vid.mp4" /></html>'


@pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', reason='segfaults')
class TestClientCertificate:
@classmethod
def setup_class(cls):
@@ -740,7 +845,7 @@ class TestRequestHandlerMisc:
@pytest.mark.parametrize('handler,logger_name', [
('Requests', 'urllib3'),
('Websockets', 'websockets.client'),
('Websockets', 'websockets.server')
('Websockets', 'websockets.server'),
], indirect=['handler'])
def test_remove_logging_handler(self, handler, logger_name):
# Ensure any logging handlers, which may contain a YoutubeDL instance,
@@ -753,6 +858,24 @@ def test_remove_logging_handler(self, handler, logger_name):
rh.close()
assert len(logging_handlers) == before_count

def test_wrap_request_errors(self):
class TestRequestHandler(RequestHandler):
def _validate(self, request):
if request.headers.get('x-fail'):
raise UnsupportedRequest('test error')

def _send(self, request: Request):
raise RequestError('test error')

with TestRequestHandler(logger=FakeLogger()) as rh:
with pytest.raises(UnsupportedRequest, match='test error') as exc_info:
rh.validate(Request('http://example.com', headers={'x-fail': '1'}))
assert exc_info.value.handler is rh

with pytest.raises(RequestError, match='test error') as exc_info:
rh.send(Request('http://example.com'))
assert exc_info.value.handler is rh


@pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
class TestUrllibRequestHandler(TestRequestHandlerBase):
@@ -772,11 +895,31 @@ def test_file_urls(self, handler):

with handler(enable_file_urls=True) as rh:
res = validate_and_send(rh, req)
assert res.read() == b'foobar'
res.close()
assert res.read(1) == b'f'
assert not res.fp.closed
assert res.read() == b'oobar'
# Should automatically close the underlying file object
assert res.fp.closed

os.unlink(tf.name)

def test_data_uri_auto_close(self, handler):
with handler() as rh:
res = validate_and_send(rh, Request('data:text/plain,hello%20world'))
assert res.read() == b'hello world'
# Should automatically close the underlying file object
assert res.fp.closed
assert res.closed

def test_http_response_auto_close(self, handler):
with handler() as rh:
res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
assert res.read() == b'<html></html>'
# Should automatically close the underlying file object in the HTTP Response
assert isinstance(res.fp, http.client.HTTPResponse)
assert res.fp.fp is None
assert res.closed

def test_http_error_returns_content(self, handler):
# urllib HTTPError will try close the underlying response if reference to the HTTPError object is lost
def get_response():
@@ -794,7 +937,7 @@ def test_verify_cert_error_text(self, handler):
with handler() as rh:
with pytest.raises(
CertificateVerifyError,
match=r'\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed: self.signed certificate'
match=r'\[SSL: CERTIFICATE_VERIFY_FAILED\] certificate verify failed: self.signed certificate',
):
validate_and_send(rh, Request(f'https://127.0.0.1:{self.https_port}/headers'))

@@ -804,14 +947,14 @@ def test_verify_cert_error_text(self, handler):
(
Request('http://127.0.0.1', method='GET\n'),
'method can\'t contain control characters',
lambda v: v < (3, 7, 9) or (3, 8, 0) <= v < (3, 8, 5)
lambda v: v < (3, 7, 9) or (3, 8, 0) <= v < (3, 8, 5),
),
# https://github.com/python/cpython/blob/987b712b4aeeece336eed24fcc87a950a756c3e2/Lib/http/client.py#L1265
# bpo-38576: Check implemented in 3.7.8+, 3.8.3+
(
Request('http://127.0.0. 1', method='GET'),
'URL can\'t contain control characters',
lambda v: v < (3, 7, 8) or (3, 8, 0) <= v < (3, 8, 3)
lambda v: v < (3, 7, 8) or (3, 8, 0) <= v < (3, 8, 3),
),
# https://github.com/python/cpython/blob/987b712b4aeeece336eed24fcc87a950a756c3e2/Lib/http/client.py#L1288C31-L1288C50
(Request('http://127.0.0.1', headers={'foo\n': 'bar'}), 'Invalid header name', None),
@@ -840,7 +983,7 @@ class TestRequestsRequestHandler(TestRequestHandlerBase):
(lambda: requests.exceptions.InvalidHeader(), RequestError),
# catch-all: https://github.com/psf/requests/blob/main/src/requests/adapters.py#L535
(lambda: urllib3.exceptions.HTTPError(), TransportError),
(lambda: requests.exceptions.RequestException(), RequestError)
(lambda: requests.exceptions.RequestException(), RequestError),
# (lambda: requests.exceptions.TooManyRedirects(), HTTPError) - Needs a response object
])
def test_request_error_mapping(self, handler, monkeypatch, raised, expected):
@@ -868,12 +1011,12 @@ def request(self, *args, **kwargs):
(
lambda: urllib3.exceptions.ProtocolError('error', http.client.IncompleteRead(partial=b'abc', expected=4)),
IncompleteRead,
'3 bytes read, 4 more expected'
'3 bytes read, 4 more expected',
),
(
lambda: urllib3.exceptions.ProtocolError('error', urllib3.exceptions.IncompleteRead(partial=3, expected=5)),
IncompleteRead,
'3 bytes read, 5 more expected'
'3 bytes read, 5 more expected',
),
])
def test_response_error_mapping(self, handler, monkeypatch, raised, expected, match):
@@ -909,12 +1052,20 @@ def mock_close(*args, **kwargs):
rh.close()
assert called

def test_http_response_auto_close(self, handler):
with handler() as rh:
res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
assert res.read() == b'<html></html>'
# Should automatically close the underlying file object in the HTTP Response
assert res.fp.closed
assert res.closed


@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
@pytest.mark.handler_flaky('CurlCFFI', os.name == 'nt', reason='segfaults')
class TestCurlCFFIRequestHandler(TestRequestHandlerBase):

@pytest.mark.parametrize('params,extensions', [
({}, {'impersonate': ImpersonateTarget('chrome')}),
({'impersonate': ImpersonateTarget('chrome', '110')}, {}),
({'impersonate': ImpersonateTarget('chrome', '99')}, {'impersonate': ImpersonateTarget('chrome', '110')}),
])
@@ -1075,6 +1226,14 @@ def close(self):
assert res4.closed
assert res4._buffer == b''

def test_http_response_auto_close(self, handler):
with handler() as rh:
res = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/gen_200'))
assert res.read() == b'<html></html>'
# Should automatically close the underlying file object in the HTTP Response
assert res.fp.closed
assert res.closed


def run_validation(handler, error, req, **handler_kwargs):
with handler(**handler_kwargs) as rh:
@@ -1125,7 +1284,7 @@ class HTTPSupportedRH(ValidationRH):
('https', False, {}),
]),
(NoCheckRH, [('http', False, {})]),
(ValidationRH, [('http', UnsupportedRequest, {})])
(ValidationRH, [('http', UnsupportedRequest, {})]),
]

PROXY_SCHEME_TESTS = [
@@ -1200,6 +1359,10 @@ class HTTPSupportedRH(ValidationRH):
({'timeout': 1}, False),
({'timeout': 'notatimeout'}, AssertionError),
({'unsupported': 'value'}, UnsupportedRequest),
({'legacy_ssl': False}, False),
({'legacy_ssl': True}, False),
({'legacy_ssl': 'notabool'}, AssertionError),
({'keep_header_casing': True}, UnsupportedRequest),
]),
('Requests', 'http', [
({'cookiejar': 'notacookiejar'}, AssertionError),
@@ -1207,6 +1370,12 @@ class HTTPSupportedRH(ValidationRH):
({'timeout': 1}, False),
({'timeout': 'notatimeout'}, AssertionError),
({'unsupported': 'value'}, UnsupportedRequest),
({'legacy_ssl': False}, False),
({'legacy_ssl': True}, False),
({'legacy_ssl': 'notabool'}, AssertionError),
({'keep_header_casing': False}, False),
({'keep_header_casing': True}, False),
({'keep_header_casing': 'notabool'}, AssertionError),
]),
('CurlCFFI', 'http', [
({'cookiejar': 'notacookiejar'}, AssertionError),
@@ -1219,7 +1388,10 @@ class HTTPSupportedRH(ValidationRH):
({'impersonate': ImpersonateTarget('chrome', None, None, None)}, False),
({'impersonate': ImpersonateTarget(None, None, None, None)}, False),
({'impersonate': ImpersonateTarget()}, False),
({'impersonate': 'chrome'}, AssertionError)
({'impersonate': 'chrome'}, AssertionError),
({'legacy_ssl': False}, False),
({'legacy_ssl': True}, False),
({'legacy_ssl': 'notabool'}, AssertionError),
]),
(NoCheckRH, 'http', [
({'cookiejar': 'notacookiejar'}, False),
@@ -1228,6 +1400,9 @@ class HTTPSupportedRH(ValidationRH):
('Websockets', 'ws', [
({'cookiejar': YoutubeDLCookieJar()}, False),
({'timeout': 2}, False),
({'legacy_ssl': False}, False),
({'legacy_ssl': True}, False),
({'legacy_ssl': 'notabool'}, AssertionError),
]),
]

@@ -1235,7 +1410,7 @@ class HTTPSupportedRH(ValidationRH):
('Urllib', False, 'http'),
('Requests', False, 'http'),
('CurlCFFI', False, 'http'),
('Websockets', False, 'ws')
('Websockets', False, 'ws'),
], indirect=['handler'])
def test_no_proxy(self, handler, fail, scheme):
run_validation(handler, fail, Request(f'{scheme}://', proxies={'no': '127.0.0.1,github.com'}))
@@ -1246,7 +1421,7 @@ def test_no_proxy(self, handler, fail, scheme):
(HTTPSupportedRH, 'http'),
('Requests', 'http'),
('CurlCFFI', 'http'),
('Websockets', 'ws')
('Websockets', 'ws'),
], indirect=['handler'])
def test_empty_proxy(self, handler, scheme):
run_validation(handler, False, Request(f'{scheme}://', proxies={scheme: None}))
@@ -1258,7 +1433,7 @@ def test_empty_proxy(self, handler, scheme):
(HTTPSupportedRH, 'http'),
('Requests', 'http'),
('CurlCFFI', 'http'),
('Websockets', 'ws')
('Websockets', 'ws'),
], indirect=['handler'])
def test_invalid_proxy_url(self, handler, scheme, proxy_url):
run_validation(handler, UnsupportedRequest, Request(f'{scheme}://', proxies={scheme: proxy_url}))
@@ -1474,7 +1649,7 @@ def test_compat_opener(self):
@pytest.mark.parametrize('proxy,expected', [
('http://127.0.0.1:8080', {'all': 'http://127.0.0.1:8080'}),
('', {'all': '__noproxy__'}),
(None, {'http': 'http://127.0.0.1:8081', 'https': 'http://127.0.0.1:8081'}) # env, set https
(None, {'http': 'http://127.0.0.1:8081', 'https': 'http://127.0.0.1:8081'}), # env, set https
])
def test_proxy(self, proxy, expected, monkeypatch):
monkeypatch.setenv('HTTP_PROXY', 'http://127.0.0.1:8081')
@@ -1546,7 +1721,7 @@ def _send(self, request: Request):
with FakeImpersonationRHYDL() as ydl:
with pytest.raises(
RequestError,
match=r'Impersonate target "test" is not available'
match=r'Impersonate target "test" is not available',
):
ydl.urlopen(Request('http://', extensions={'impersonate': ImpersonateTarget('test', None, None, None)}))

@@ -1558,7 +1733,7 @@ def _send(self, request: Request):
pass

_SUPPORTED_URL_SCHEMES = ('http',)
_SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget('abc',): 'test'}
_SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget('abc'): 'test'}
_SUPPORTED_PROXY_SCHEMES = None

super().__init__(*args, **kwargs)
@@ -1567,14 +1742,14 @@ def _send(self, request: Request):
with FakeHTTPRHYDL() as ydl:
with pytest.raises(
RequestError,
match=r'Impersonate target "test" is not available'
match=r'Impersonate target "test" is not available',
):
ydl.urlopen(Request('http://', extensions={'impersonate': ImpersonateTarget('test', None, None, None)}))

def test_raise_impersonate_error(self):
with pytest.raises(
YoutubeDLError,
match=r'Impersonate target "test" is not available'
match=r'Impersonate target "test" is not available',
):
FakeYDL({'impersonate': ImpersonateTarget('test', None, None, None)})

@@ -1592,7 +1767,7 @@ def _send(self, request: Request):
monkeypatch.setattr(FakeYDL, 'build_request_director', lambda cls, handlers, preferences=None: brh(cls, handlers=[IRH]))

with FakeYDL({
'impersonate': ImpersonateTarget('abc', None, None, None)
'impersonate': ImpersonateTarget('abc', None, None, None),
}) as ydl:
rh = self.build_handler(ydl, IRH)
assert rh.impersonate == ImpersonateTarget('abc', None, None, None)
@@ -1604,7 +1779,7 @@ class TestRH(ImpersonateRequestHandler):
def _send(self, request: Request):
pass
_SUPPORTED_URL_SCHEMES = ('http',)
_SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget(target_client,): 'test'}
_SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget(target_client): 'test'}
RH_KEY = target_client
RH_NAME = target_client
handlers.append(TestRH)
@@ -1614,7 +1789,7 @@ def _send(self, request: Request):
assert set(ydl._get_available_impersonate_targets()) == {
(ImpersonateTarget('xyz'), 'xyz'),
(ImpersonateTarget('abc'), 'abc'),
(ImpersonateTarget('asd'), 'asd')
(ImpersonateTarget('asd'), 'asd'),
}
assert ydl._impersonate_target_available(ImpersonateTarget('abc'))
assert ydl._impersonate_target_available(ImpersonateTarget())
@@ -1746,6 +1921,7 @@ def test_method(self):

def test_request_helpers(self):
assert HEADRequest('http://example.com').method == 'HEAD'
assert PATCHRequest('http://example.com').method == 'PATCH'
assert PUTRequest('http://example.com').method == 'PUT'

def test_headers(self):
@@ -1837,7 +2013,7 @@ def test_copy(self):
extensions={'cookiejar': CookieJar()},
headers={'Accept-Encoding': 'br'},
proxies={'http': 'http://127.0.0.1'},
data=[b'123']
data=[b'123'],
)
req_copy = req.copy()
assert req_copy is not req
@@ -1863,7 +2039,7 @@ class AnotherRequest(Request):
assert isinstance(req.copy(), AnotherRequest)

def test_url(self):
req = Request(url='https://фtest.example.com/ some spaceв?ä=c',)
req = Request(url='https://фtest.example.com/ some spaceв?ä=c')
assert req.url == 'https://xn--test-z6d.example.com/%20some%20space%D0%B2?%C3%A4=c'

assert Request(url='//example.com').url == 'http://example.com'
@@ -1878,7 +2054,7 @@ class TestResponse:
('custom', 200, 'custom'),
(None, 404, 'Not Found'), # fallback status
('', 403, 'Forbidden'),
(None, 999, None)
(None, 999, None),
])
def test_reason(self, reason, status, expected):
res = Response(io.BytesIO(b''), url='test://', headers={}, status=status, reason=reason)
@@ -1913,6 +2089,30 @@ def test_compat(self):
assert res.info() is res.headers
assert res.getheader('test') == res.get_header('test')

def test_auto_close(self):
# Should mark the response as closed if the underlying file is closed
class AutoCloseBytesIO(io.BytesIO):
def read(self, size=-1, /):
data = super().read(size)
self.close()
return data

fp = AutoCloseBytesIO(b'test')
res = Response(fp, url='test://', headers={}, status=200)
assert not res.closed
res.read()
assert res.closed

def test_close(self):
# Should not call close() on the underlying file when already closed
fp = MagicMock()
fp.closed = False
res = Response(fp, url='test://', headers={}, status=200)
res.close()
fp.closed = True
res.close()
assert fp.close.call_count == 1


class TestImpersonateTarget:
@pytest.mark.parametrize('target_str,expected', [
@@ -1933,7 +2133,7 @@ def test_target_from_str(self, target_str, expected):

@pytest.mark.parametrize('target_str', [
'-120', ':-12.0', '-12:-12', '-:-',
'::', 'a-c-d:', 'a-c-d:e-f-g', 'a:b:'
'::', 'a-c-d:', 'a-c-d:e-f-g', 'a:b:',
])
def test_target_from_invalid_str(self, target_str):
with pytest.raises(ValueError):
@@ -1949,7 +2149,7 @@ def test_target_from_invalid_str(self, target_str):
(ImpersonateTarget('abc', '120', 'xyz', None), 'abc-120:xyz'),
(ImpersonateTarget('abc', None, 'xyz'), 'abc:xyz'),
(ImpersonateTarget(None, None, 'xyz', '6.5'), ':xyz-6.5'),
(ImpersonateTarget('abc', ), 'abc'),
(ImpersonateTarget('abc'), 'abc'),
(ImpersonateTarget(None, None, None, None), ''),
])
def test_str(self, target, expected):

@@ -20,7 +20,6 @@
add_accept_encoding_header,
get_redirect_method,
make_socks_proxy_opts,
select_proxy,
ssl_load_certs,
)
from yt_dlp.networking.exceptions import (
@@ -28,7 +27,7 @@
IncompleteRead,
)
from yt_dlp.socks import ProxyType
from yt_dlp.utils.networking import HTTPHeaderDict
from yt_dlp.utils.networking import HTTPHeaderDict, select_proxy

TEST_DIR = os.path.dirname(os.path.abspath(__file__))

@@ -39,7 +38,7 @@ def test_select_proxy(self):
proxies = {
'all': 'socks5://example.com',
'http': 'http://example.com:1080',
'no': 'bypass.example.com,yt-dl.org'
'no': 'bypass.example.com,yt-dl.org',
}

assert select_proxy('https://example.com', proxies) == proxies['all']
@@ -54,7 +53,7 @@ def test_select_proxy(self):
'port': 1080,
'rdns': True,
'username': None,
'password': None
'password': None,
}),
('socks5://user:@example.com:5555', {
'proxytype': ProxyType.SOCKS5,
@@ -62,7 +61,7 @@ def test_select_proxy(self):
'port': 5555,
'rdns': False,
'username': 'user',
'password': ''
'password': '',
}),
('socks4://u%40ser:pa%20ss@127.0.0.1:1080', {
'proxytype': ProxyType.SOCKS4,
@@ -70,7 +69,7 @@ def test_select_proxy(self):
'port': 1080,
'rdns': False,
'username': 'u@ser',
'password': 'pa ss'
'password': 'pa ss',
}),
('socks4a://:pa%20ss@127.0.0.1', {
'proxytype': ProxyType.SOCKS4A,
@@ -78,8 +77,8 @@ def test_select_proxy(self):
'port': 1080,
'rdns': True,
'username': '',
'password': 'pa ss'
})
'password': 'pa ss',
}),
])
def test_make_socks_proxy_opts(self, socks_proxy, expected):
assert make_socks_proxy_opts(socks_proxy) == expected

@@ -27,9 +27,9 @@ def test_default_overwrites(self):
[
sys.executable, 'yt_dlp/__main__.py',
'-o', 'test.webm',
'https://www.youtube.com/watch?v=jNQXAC9IVRw'
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sout, serr = outp.communicate()
sout, _ = outp.communicate()
self.assertTrue(b'has already been downloaded' in sout)
# if the file has no content, it has not been redownloaded
self.assertTrue(os.path.getsize(download_file) < 1)
@@ -39,9 +39,9 @@ def test_yes_overwrites(self):
[
sys.executable, 'yt_dlp/__main__.py', '--yes-overwrites',
'-o', 'test.webm',
'https://www.youtube.com/watch?v=jNQXAC9IVRw'
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sout, serr = outp.communicate()
sout, _ = outp.communicate()
self.assertTrue(b'has already been downloaded' not in sout)
# if the file has no content, it has not been redownloaded
self.assertTrue(os.path.getsize(download_file) > 1)

@@ -10,41 +10,103 @@
sys.path.append(str(TEST_DATA_DIR))
importlib.invalidate_caches()

from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins
from yt_dlp.plugins import (
PACKAGE_NAME,
PluginSpec,
directories,
load_plugins,
load_all_plugins,
register_plugin_spec,
)

from yt_dlp.globals import (
extractors,
postprocessors,
plugin_dirs,
plugin_ies,
plugin_pps,
all_plugins_loaded,
plugin_specs,
)


EXTRACTOR_PLUGIN_SPEC = PluginSpec(
module_name='extractor',
suffix='IE',
destination=extractors,
plugin_destination=plugin_ies,
)

POSTPROCESSOR_PLUGIN_SPEC = PluginSpec(
module_name='postprocessor',
suffix='PP',
destination=postprocessors,
plugin_destination=plugin_pps,
)


def reset_plugins():
plugin_ies.value = {}
plugin_pps.value = {}
plugin_dirs.value = ['default']
plugin_specs.value = {}
all_plugins_loaded.value = False
# Clearing override plugins is probably difficult
for module_name in tuple(sys.modules):
for plugin_type in ('extractor', 'postprocessor'):
if module_name.startswith(f'{PACKAGE_NAME}.{plugin_type}.'):
del sys.modules[module_name]

importlib.invalidate_caches()


class TestPlugins(unittest.TestCase):

TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME

def setUp(self):
reset_plugins()

def tearDown(self):
reset_plugins()

def test_directories_containing_plugins(self):
self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))

def test_extractor_classes(self):
for module_name in tuple(sys.modules):
if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
del sys.modules[module_name]
plugins_ie = load_plugins('extractor', 'IE')
plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)

self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
self.assertIn('NormalPluginIE', plugins_ie.keys())

# don't load modules with underscore prefix
self.assertFalse(
f'{PACKAGE_NAME}.extractor._ignore' in sys.modules.keys(),
f'{PACKAGE_NAME}.extractor._ignore' in sys.modules,
'loaded module beginning with underscore')
self.assertNotIn('IgnorePluginIE', plugins_ie.keys())
self.assertNotIn('IgnorePluginIE', plugin_ies.value)

# Don't load extractors with underscore prefix
self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())
self.assertNotIn('_IgnoreUnderscorePluginIE', plugin_ies.value)

# Don't load extractors not specified in __all__ (if supplied)
self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
self.assertNotIn('IgnoreNotInAllPluginIE', plugin_ies.value)
self.assertIn('InAllPluginIE', plugins_ie.keys())
self.assertIn('InAllPluginIE', plugin_ies.value)

# Don't load override extractors
self.assertNotIn('OverrideGenericIE', plugins_ie.keys())
self.assertNotIn('OverrideGenericIE', plugin_ies.value)
self.assertNotIn('_UnderscoreOverrideGenericIE', plugins_ie.keys())
self.assertNotIn('_UnderscoreOverrideGenericIE', plugin_ies.value)

def test_postprocessor_classes(self):
plugins_pp = load_plugins('postprocessor', 'PP')
plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
self.assertIn('NormalPluginPP', plugins_pp.keys())
self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
self.assertIn('NormalPluginPP', plugin_pps.value)

def test_importing_zipped_module(self):
zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'
@@ -57,10 +119,10 @@ def test_importing_zipped_module(self):
package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

plugins_ie = load_plugins('extractor', 'IE')
plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
self.assertIn('ZippedPluginIE', plugins_ie.keys())

plugins_pp = load_plugins('postprocessor', 'PP')
plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
self.assertIn('ZippedPluginPP', plugins_pp.keys())

finally:
@@ -68,6 +130,117 @@ def test_importing_zipped_module(self):
os.remove(zip_path)
|
||||
importlib.invalidate_caches() # reset the import caches
|
||||
|
||||
def test_reloading_plugins(self):
|
||||
reload_plugins_path = TEST_DATA_DIR / 'reload_plugins'
|
||||
load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
|
||||
|
||||
# Remove default folder and add reload_plugin path
|
||||
sys.path.remove(str(TEST_DATA_DIR))
|
||||
sys.path.append(str(reload_plugins_path))
|
||||
importlib.invalidate_caches()
|
||||
try:
|
||||
for plugin_type in ('extractor', 'postprocessor'):
|
||||
package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
|
||||
self.assertIn(reload_plugins_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))
|
||||
|
||||
plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
self.assertIn('NormalPluginIE', plugins_ie.keys())
|
||||
self.assertTrue(
|
||||
plugins_ie['NormalPluginIE'].REPLACED,
|
||||
msg='Reloading has not replaced original extractor plugin')
|
||||
self.assertTrue(
|
||||
extractors.value['NormalPluginIE'].REPLACED,
|
||||
msg='Reloading has not replaced original extractor plugin globally')
|
||||
|
||||
plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
|
||||
self.assertIn('NormalPluginPP', plugins_pp.keys())
|
||||
self.assertTrue(plugins_pp['NormalPluginPP'].REPLACED,
|
||||
msg='Reloading has not replaced original postprocessor plugin')
|
||||
self.assertTrue(
|
||||
postprocessors.value['NormalPluginPP'].REPLACED,
|
||||
msg='Reloading has not replaced original postprocessor plugin globally')
|
||||
|
||||
finally:
|
||||
sys.path.remove(str(reload_plugins_path))
|
||||
sys.path.append(str(TEST_DATA_DIR))
|
||||
importlib.invalidate_caches()
|
||||
|
||||
def test_extractor_override_plugin(self):
|
||||
load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
|
||||
from yt_dlp.extractor.generic import GenericIE
|
||||
|
||||
self.assertEqual(GenericIE.TEST_FIELD, 'override')
|
||||
self.assertEqual(GenericIE.SECONDARY_TEST_FIELD, 'underscore-override')
|
||||
|
||||
self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')
|
||||
importlib.invalidate_caches()
|
||||
# test that loading a second time doesn't wrap a second time
|
||||
load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
from yt_dlp.extractor.generic import GenericIE
|
||||
self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')
|
||||
|
||||
def test_load_all_plugin_types(self):
|
||||
|
||||
# no plugin specs registered
|
||||
load_all_plugins()
|
||||
|
||||
self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
|
||||
self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
|
||||
|
||||
register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
|
||||
register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
|
||||
load_all_plugins()
|
||||
self.assertTrue(all_plugins_loaded.value)
|
||||
|
||||
self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
|
||||
self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
|
||||
|
||||
def test_no_plugin_dirs(self):
|
||||
register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
|
||||
register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
|
||||
|
||||
plugin_dirs.value = []
|
||||
load_all_plugins()
|
||||
|
||||
self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
|
||||
self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
|
||||
|
||||
def test_set_plugin_dirs(self):
|
||||
custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')
|
||||
plugin_dirs.value = [custom_plugin_dir]
|
||||
|
||||
load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
|
||||
self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
|
||||
self.assertIn('PackagePluginIE', plugin_ies.value)
|
||||
|
||||
def test_invalid_plugin_dir(self):
|
||||
plugin_dirs.value = ['invalid_dir']
|
||||
with self.assertRaises(ValueError):
|
||||
load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
|
||||
def test_append_plugin_dirs(self):
|
||||
custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')
|
||||
|
||||
self.assertEqual(plugin_dirs.value, ['default'])
|
||||
plugin_dirs.value.append(custom_plugin_dir)
|
||||
self.assertEqual(plugin_dirs.value, ['default', custom_plugin_dir])
|
||||
|
||||
load_plugins(EXTRACTOR_PLUGIN_SPEC)
|
||||
|
||||
self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
|
||||
self.assertIn('PackagePluginIE', plugin_ies.value)
|
||||
|
||||
def test_get_plugin_spec(self):
|
||||
register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
|
||||
register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
|
||||
|
||||
self.assertEqual(plugin_specs.value.get('extractor'), EXTRACTOR_PLUGIN_SPEC)
|
||||
self.assertEqual(plugin_specs.value.get('postprocessor'), POSTPROCESSOR_PLUGIN_SPEC)
|
||||
self.assertIsNone(plugin_specs.value.get('invalid'))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -59,7 +59,7 @@ def hook_two(self, filename):
|
||||
|
||||
def hook_three(self, filename):
|
||||
self.files.append(filename)
|
||||
raise Exception('Test exception for \'%s\'' % filename)
|
||||
raise Exception(f'Test exception for \'{filename}\'')
|
||||
|
||||
def tearDown(self):
|
||||
for f in self.files:
|
||||
|
||||
@@ -8,8 +8,10 @@
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import subprocess
|
||||
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import compat_shlex_quote
|
||||
from yt_dlp.utils import shell_quote
|
||||
from yt_dlp.postprocessor import (
|
||||
ExecPP,
|
||||
FFmpegThumbnailsConvertorPP,
|
||||
@@ -47,7 +49,18 @@ def test_escaping(self):
            print('Skipping: ffmpeg not found')
            return

-        file = 'test/testdata/thumbnails/foo %d bar/foo_%d.{}'
+        test_data_dir = 'test/testdata/thumbnails'
+        generated_file = f'{test_data_dir}/empty.webp'
+
+        subprocess.check_call([
+            pp.executable, '-y', '-f', 'lavfi', '-i', 'color=c=black:s=320x320',
+            '-c:v', 'libwebp', '-pix_fmt', 'yuv420p', '-vframes', '1', generated_file,
+        ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+
+        file = test_data_dir + '/foo %d bar/foo_%d.{}'
+        initial_file = file.format('webp')
+        os.replace(generated_file, initial_file)

        tests = (('webp', 'png'), ('png', 'jpg'))

        for inp, out in tests:
@@ -55,17 +68,19 @@ def test_escaping(self):
            if os.path.exists(out_file):
                os.remove(out_file)
            pp.convert_thumbnail(file.format(inp), out)
-            assert os.path.exists(out_file)
+            self.assertTrue(os.path.exists(out_file))

        for _, out in tests:
            os.remove(file.format(out))

+        os.remove(initial_file)


class TestExec(unittest.TestCase):
    def test_parse_cmd(self):
        pp = ExecPP(YoutubeDL(), '')
        info = {'filepath': 'file name'}
-        cmd = 'echo %s' % compat_shlex_quote(info['filepath'])
+        cmd = 'echo {}'.format(shell_quote(info['filepath']))

        self.assertEqual(pp.parse_cmd('echo', info), cmd)
        self.assertEqual(pp.parse_cmd('echo {}', info), cmd)
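# A small usage sketch (not part of the diff): shell_quote() quotes an
# argument for the shell, which is why the expected command above wraps the
# spaced filepath. The output shown is approximate POSIX quoting.
from yt_dlp.utils import shell_quote

print(shell_quote('file name'))  # roughly: 'file name'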
@@ -100,7 +115,7 @@ def _chapters(self, ends, titles):
        self.assertEqual(len(ends), len(titles))
        start = 0
        chapters = []
-        for e, t in zip(ends, titles):
+        for e, t in zip(ends, titles, strict=True):
            chapters.append(self._chapter(start, e, t))
            start = e
        return chapters
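# A quick illustration (not part of the diff) of the strict=True change:
# plain zip() silently truncates to the shorter iterable, while strict=True
# (Python 3.10+) raises ValueError if ends and titles differ in length.
list(zip([10, 20], ['a']))                  # [(10, 'a')] -- silent truncation
# list(zip([10, 20], ['a'], strict=True))   # ValueError: argument 2 is shorter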
@@ -125,7 +140,8 @@ def test_remove_marked_arrange_sponsors_CanGetThroughUnaltered(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, chapters, [])

    def test_remove_marked_arrange_sponsors_ChapterWithSponsors(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(30, 40, 'preview'),
            self._sponsor_chapter(50, 60, 'filler')]
@@ -136,7 +152,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithSponsors(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_SponsorBlockChapters(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'chapter', title='sb c1'),
            self._sponsor_chapter(15, 16, 'chapter', title='sb c2'),
            self._sponsor_chapter(30, 40, 'preview'),
@@ -149,10 +166,14 @@ def test_remove_marked_arrange_sponsors_SponsorBlockChapters(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_UniqueNamesForOverlappingSponsors(self):
-        chapters = self._chapters([120], ['c']) + [
-            self._sponsor_chapter(10, 45, 'sponsor'), self._sponsor_chapter(20, 40, 'selfpromo'),
-            self._sponsor_chapter(50, 70, 'sponsor'), self._sponsor_chapter(60, 85, 'selfpromo'),
-            self._sponsor_chapter(90, 120, 'selfpromo'), self._sponsor_chapter(100, 110, 'sponsor')]
+        chapters = [
+            *self._chapters([120], ['c']),
+            self._sponsor_chapter(10, 45, 'sponsor'),
+            self._sponsor_chapter(20, 40, 'selfpromo'),
+            self._sponsor_chapter(50, 70, 'sponsor'),
+            self._sponsor_chapter(60, 85, 'selfpromo'),
+            self._sponsor_chapter(90, 120, 'selfpromo'),
+            self._sponsor_chapter(100, 110, 'sponsor')]
        expected = self._chapters(
            [10, 20, 40, 45, 50, 60, 70, 85, 90, 100, 110, 120],
            ['c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Sponsor, Unpaid/Self Promotion',
@@ -172,7 +193,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithCuts(self):
            chapters, self._chapters([40], ['c']), cuts)

    def test_remove_marked_arrange_sponsors_ChapterWithSponsorsAndCuts(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(30, 40, 'selfpromo', remove=True),
            self._sponsor_chapter(50, 60, 'interaction')]
@@ -185,24 +207,29 @@ def test_remove_marked_arrange_sponsors_ChapterWithSponsorsAndCuts(self):
    def test_remove_marked_arrange_sponsors_ChapterWithSponsorCutInTheMiddle(self):
        cuts = [self._sponsor_chapter(20, 30, 'selfpromo', remove=True),
                self._chapter(40, 50, remove=True)]
-        chapters = self._chapters([70], ['c']) + [self._sponsor_chapter(10, 60, 'sponsor')] + cuts
+        chapters = [
+            *self._chapters([70], ['c']),
+            self._sponsor_chapter(10, 60, 'sponsor'),
+            *cuts]
        expected = self._chapters(
            [10, 40, 50], ['c', '[SponsorBlock]: Sponsor', 'c'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_ChapterWithCutHidingSponsor(self):
        cuts = [self._sponsor_chapter(20, 50, 'selfpromo', remove=True)]
-        chapters = self._chapters([60], ['c']) + [
+        chapters = [
+            *self._chapters([60], ['c']),
            self._sponsor_chapter(10, 20, 'intro'),
            self._sponsor_chapter(30, 40, 'sponsor'),
            self._sponsor_chapter(50, 60, 'outro'),
-        ] + cuts
+            *cuts]
        expected = self._chapters(
            [10, 20, 30], ['c', '[SponsorBlock]: Intermission/Intro Animation', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_ChapterWithAdjacentSponsors(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(20, 30, 'selfpromo'),
            self._sponsor_chapter(30, 40, 'interaction')]
@@ -213,7 +240,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithAdjacentSponsors(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_ChapterWithAdjacentCuts(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(20, 30, 'interaction', remove=True),
            self._chapter(30, 40, remove=True),
@@ -226,7 +254,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithAdjacentCuts(self):
            chapters, expected, [self._chapter(20, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_ChapterWithOverlappingSponsors(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 30, 'sponsor'),
            self._sponsor_chapter(20, 50, 'selfpromo'),
            self._sponsor_chapter(40, 60, 'interaction')]
@@ -238,7 +267,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithOverlappingSponsors(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_ChapterWithOverlappingCuts(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 30, 'sponsor', remove=True),
            self._sponsor_chapter(20, 50, 'selfpromo', remove=True),
            self._sponsor_chapter(40, 60, 'interaction', remove=True)]
@@ -246,7 +276,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithOverlappingCuts(self):
            chapters, self._chapters([20], ['c']), [self._chapter(10, 60, remove=True)])
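# A rough, self-contained sketch (not yt-dlp's implementation) of the
# interval merging the overlapping-cuts test above relies on: the three
# overlapping removed ranges collapse into the single cut (10, 60).
def merge_intervals(intervals):
    merged = []
    for start, end in sorted(intervals):
        if merged and start <= merged[-1][1]:
            merged[-1][1] = max(merged[-1][1], end)  # extend the current run
        else:
            merged.append([start, end])              # start a new run
    return merged

assert merge_intervals([(10, 30), (20, 50), (40, 60)]) == [[10, 60]]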
    def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsors(self):
-        chapters = self._chapters([170], ['c']) + [
+        chapters = [
+            *self._chapters([170], ['c']),
            self._sponsor_chapter(0, 30, 'intro'),
            self._sponsor_chapter(20, 50, 'sponsor'),
            self._sponsor_chapter(40, 60, 'selfpromo'),
@@ -267,7 +298,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsors(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingCuts(self):
-        chapters = self._chapters([170], ['c']) + [
+        chapters = [
+            *self._chapters([170], ['c']),
            self._chapter(0, 30, remove=True),
            self._sponsor_chapter(20, 50, 'sponsor', remove=True),
            self._chapter(40, 60, remove=True),
@@ -284,7 +316,8 @@ def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingCuts(self):
            chapters, self._chapters([20], ['c']), expected_cuts)

    def test_remove_marked_arrange_sponsors_OverlappingSponsorsDifferentTitlesAfterCut(self):
-        chapters = self._chapters([60], ['c']) + [
+        chapters = [
+            *self._chapters([60], ['c']),
            self._sponsor_chapter(10, 60, 'sponsor'),
            self._sponsor_chapter(10, 40, 'intro'),
            self._sponsor_chapter(30, 50, 'interaction'),
@@ -297,7 +330,8 @@ def test_remove_marked_arrange_sponsors_OverlappingSponsorsDifferentTitlesAfterCut(self):
            chapters, expected, [self._chapter(30, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_SponsorsNoLongerOverlapAfterCut(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 30, 'sponsor'),
            self._sponsor_chapter(20, 50, 'interaction'),
            self._sponsor_chapter(30, 50, 'selfpromo', remove=True),
@@ -310,7 +344,8 @@ def test_remove_marked_arrange_sponsors_SponsorsNoLongerOverlapAfterCut(self):
            chapters, expected, [self._chapter(30, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_SponsorsStillOverlapAfterCut(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 60, 'sponsor'),
            self._sponsor_chapter(20, 60, 'interaction'),
            self._sponsor_chapter(30, 50, 'selfpromo', remove=True)]
@@ -321,7 +356,8 @@ def test_remove_marked_arrange_sponsors_SponsorsStillOverlapAfterCut(self):
            chapters, expected, [self._chapter(30, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsorsAndCuts(self):
-        chapters = self._chapters([200], ['c']) + [
+        chapters = [
+            *self._chapters([200], ['c']),
            self._sponsor_chapter(10, 40, 'sponsor'),
            self._sponsor_chapter(10, 30, 'intro'),
            self._chapter(20, 30, remove=True),
@@ -347,8 +383,9 @@ def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsorsAndCuts(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, expected_cuts)

    def test_remove_marked_arrange_sponsors_SponsorOverlapsMultipleChapters(self):
-        chapters = (self._chapters([20, 40, 60, 80, 100], ['c1', 'c2', 'c3', 'c4', 'c5'])
-                    + [self._sponsor_chapter(10, 90, 'sponsor')])
+        chapters = [
+            *self._chapters([20, 40, 60, 80, 100], ['c1', 'c2', 'c3', 'c4', 'c5']),
+            self._sponsor_chapter(10, 90, 'sponsor')]
        expected = self._chapters([10, 90, 100], ['c1', '[SponsorBlock]: Sponsor', 'c5'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -359,9 +396,10 @@ def test_remove_marked_arrange_sponsors_CutOverlapsMultipleChapters(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorsWithinSomeChaptersAndOverlappingOthers(self):
-        chapters = (self._chapters([10, 40, 60, 80], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(20, 30, 'sponsor'),
-                       self._sponsor_chapter(50, 70, 'selfpromo')])
+        chapters = [
+            *self._chapters([10, 40, 60, 80], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(20, 30, 'sponsor'),
+            self._sponsor_chapter(50, 70, 'selfpromo')]
        expected = self._chapters([10, 20, 30, 40, 50, 70, 80],
                                  ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c2', 'c3',
                                   '[SponsorBlock]: Unpaid/Self Promotion', 'c4'])
@@ -374,8 +412,9 @@ def test_remove_marked_arrange_sponsors_CutsWithinSomeChaptersAndOverlappingOthers(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_ChaptersAfterLastSponsor(self):
-        chapters = (self._chapters([20, 40, 50, 60], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(10, 30, 'music_offtopic')])
+        chapters = [
+            *self._chapters([20, 40, 50, 60], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(10, 30, 'music_offtopic')]
        expected = self._chapters(
            [10, 30, 40, 50, 60],
            ['c1', '[SponsorBlock]: Non-Music Section', 'c2', 'c3', 'c4'])
@@ -388,8 +427,9 @@ def test_remove_marked_arrange_sponsors_ChaptersAfterLastCut(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorStartsAtChapterStart(self):
-        chapters = (self._chapters([10, 20, 40], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(20, 30, 'sponsor')])
+        chapters = [
+            *self._chapters([10, 20, 40], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(20, 30, 'sponsor')]
        expected = self._chapters([10, 20, 30, 40], ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c3'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -400,8 +440,9 @@ def test_remove_marked_arrange_sponsors_CutStartsAtChapterStart(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorEndsAtChapterEnd(self):
-        chapters = (self._chapters([10, 30, 40], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(20, 30, 'sponsor')])
+        chapters = [
+            *self._chapters([10, 30, 40], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(20, 30, 'sponsor')]
        expected = self._chapters([10, 20, 30, 40], ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c3'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -412,8 +453,9 @@ def test_remove_marked_arrange_sponsors_CutEndsAtChapterEnd(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorCoincidesWithChapters(self):
-        chapters = (self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(10, 30, 'sponsor')])
+        chapters = [
+            *self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(10, 30, 'sponsor')]
        expected = self._chapters([10, 30, 40], ['c1', '[SponsorBlock]: Sponsor', 'c4'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -424,8 +466,9 @@ def test_remove_marked_arrange_sponsors_CutCoincidesWithChapters(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorsAtVideoBoundaries(self):
-        chapters = (self._chapters([20, 40, 60], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(0, 10, 'intro'), self._sponsor_chapter(50, 60, 'outro')])
+        chapters = [
+            *self._chapters([20, 40, 60], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(0, 10, 'intro'), self._sponsor_chapter(50, 60, 'outro')]
        expected = self._chapters(
            [10, 20, 40, 50, 60], ['[SponsorBlock]: Intermission/Intro Animation', 'c1', 'c2', 'c3', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
@@ -437,8 +480,10 @@ def test_remove_marked_arrange_sponsors_CutsAtVideoBoundaries(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorsOverlapChaptersAtVideoBoundaries(self):
-        chapters = (self._chapters([10, 40, 50], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(0, 20, 'intro'), self._sponsor_chapter(30, 50, 'outro')])
+        chapters = [
+            *self._chapters([10, 40, 50], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(0, 20, 'intro'),
+            self._sponsor_chapter(30, 50, 'outro')]
        expected = self._chapters(
            [20, 30, 50], ['[SponsorBlock]: Intermission/Intro Animation', 'c2', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
@@ -450,8 +495,10 @@ def test_remove_marked_arrange_sponsors_CutsOverlapChaptersAtVideoBoundaries(self):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_EverythingSponsored(self):
-        chapters = (self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(0, 20, 'intro'), self._sponsor_chapter(20, 40, 'outro')])
+        chapters = [
+            *self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(0, 20, 'intro'),
+            self._sponsor_chapter(20, 40, 'outro')]
        expected = self._chapters([20, 40], ['[SponsorBlock]: Intermission/Intro Animation', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -491,38 +538,39 @@ def test_remove_marked_arrange_sponsors_TinyChapterAtTheStartPrependedToTheNext(self):
            chapters, self._chapters([2.5], ['c2']), cuts)

    def test_remove_marked_arrange_sponsors_TinyChaptersResultingFromSponsorOverlapAreIgnored(self):
-        chapters = self._chapters([1, 3, 4], ['c1', 'c2', 'c3']) + [
+        chapters = [
+            *self._chapters([1, 3, 4], ['c1', 'c2', 'c3']),
            self._sponsor_chapter(1.5, 2.5, 'sponsor')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([1.5, 2.5, 4], ['c1', '[SponsorBlock]: Sponsor', 'c3']), [])

    def test_remove_marked_arrange_sponsors_TinySponsorsOverlapsAreIgnored(self):
-        chapters = self._chapters([2, 3, 5], ['c1', 'c2', 'c3']) + [
+        chapters = [
+            *self._chapters([2, 3, 5], ['c1', 'c2', 'c3']),
            self._sponsor_chapter(1, 3, 'sponsor'),
-            self._sponsor_chapter(2.5, 4, 'selfpromo')
-        ]
+            self._sponsor_chapter(2.5, 4, 'selfpromo')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([1, 3, 4, 5], [
                'c1', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Unpaid/Self Promotion', 'c3']), [])

    def test_remove_marked_arrange_sponsors_TinySponsorsPrependedToTheNextSponsor(self):
-        chapters = self._chapters([4], ['c']) + [
+        chapters = [
+            *self._chapters([4], ['c']),
            self._sponsor_chapter(1.5, 2, 'sponsor'),
-            self._sponsor_chapter(2, 4, 'selfpromo')
-        ]
+            self._sponsor_chapter(2, 4, 'selfpromo')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([1.5, 4], ['c', '[SponsorBlock]: Unpaid/Self Promotion']), [])

    def test_remove_marked_arrange_sponsors_SmallestSponsorInTheOverlapGetsNamed(self):
        self._pp._sponsorblock_chapter_title = '[SponsorBlock]: %(name)s'
-        chapters = self._chapters([10], ['c']) + [
+        chapters = [
+            *self._chapters([10], ['c']),
            self._sponsor_chapter(2, 8, 'sponsor'),
-            self._sponsor_chapter(4, 6, 'selfpromo')
-        ]
+            self._sponsor_chapter(4, 6, 'selfpromo')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([2, 4, 6, 8, 10], [
                'c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Unpaid/Self Promotion',
-                '[SponsorBlock]: Sponsor', 'c'
+                '[SponsorBlock]: Sponsor', 'c',
            ]), [])

    def test_make_concat_opts_CommonCase(self):
@@ -577,3 +625,7 @@ def test_quote_for_concat_QuotesAtEnd(self):
        self.assertEqual(
            r"'special '\'' characters '\'' galore'\'\'\'",
            self._pp._quote_for_ffmpeg("special ' characters ' galore'''"))


if __name__ == '__main__':
    unittest.main()
test/test_pot/conftest.py (new file, 71 lines)
@@ -0,0 +1,71 @@
import collections

import pytest

from yt_dlp import YoutubeDL
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.extractor.youtube.pot._provider import IEContentProviderLogger
from yt_dlp.extractor.youtube.pot.provider import PoTokenRequest, PoTokenContext
from yt_dlp.utils.networking import HTTPHeaderDict


class MockLogger(IEContentProviderLogger):

    log_level = IEContentProviderLogger.LogLevel.TRACE

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.messages = collections.defaultdict(list)

    def trace(self, message: str):
        self.messages['trace'].append(message)

    def debug(self, message: str):
        self.messages['debug'].append(message)

    def info(self, message: str):
        self.messages['info'].append(message)

    def warning(self, message: str, *, once=False):
        self.messages['warning'].append(message)

    def error(self, message: str):
        self.messages['error'].append(message)


@pytest.fixture
def ie() -> InfoExtractor:
    ydl = YoutubeDL()
    return ydl.get_info_extractor('Youtube')


@pytest.fixture
def logger() -> MockLogger:
    return MockLogger()


@pytest.fixture()
def pot_request() -> PoTokenRequest:
    return PoTokenRequest(
        context=PoTokenContext.GVS,
        innertube_context={'client': {'clientName': 'WEB'}},
        innertube_host='youtube.com',
        session_index=None,
        player_url=None,
        is_authenticated=False,
        video_webpage=None,

        visitor_data='example-visitor-data',
        data_sync_id='example-data-sync-id',
        video_id='example-video-id',

        request_cookiejar=YoutubeDLCookieJar(),
        request_proxy=None,
        request_headers=HTTPHeaderDict(),
        request_timeout=None,
        request_source_address=None,
        request_verify_tls=True,

        bypass_cache=False,
    )
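# A minimal sketch (not part of the diff) of a test consuming the fixtures
# defined above; pytest injects them by argument name.
def test_fixture_defaults(pot_request, logger):
    assert pot_request.visitor_data == 'example-visitor-data'
    logger.warning('something odd')
    assert logger.messages['warning'] == ['something odd']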
test/test_pot/test_pot_builtin_memorycache.py (new file, 117 lines)
@@ -0,0 +1,117 @@
import threading
import time
from collections import OrderedDict
import pytest
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot._builtin.memory_cache import MemoryLRUPCP, memorylru_preference, initialize_global_cache
from yt_dlp.version import __version__
from yt_dlp.extractor.youtube.pot._registry import _pot_cache_providers, _pot_memory_cache


class TestMemoryLRUPCS:

    def test_base_type(self):
        assert issubclass(MemoryLRUPCP, IEContentProvider)
        assert issubclass(MemoryLRUPCP, BuiltinIEContentProvider)

    @pytest.fixture
    def pcp(self, ie, logger) -> MemoryLRUPCP:
        return MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), max_size))

    def test_is_registered(self):
        assert _pot_cache_providers.value.get('MemoryLRU') == MemoryLRUPCP

    def test_initialization(self, pcp):
        assert pcp.PROVIDER_NAME == 'memory'
        assert pcp.PROVIDER_VERSION == __version__
        assert pcp.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcp.is_available()

    def test_store_and_get(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1

    def test_store_ignore_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_store_override_existing_key(self, ie, logger):
        MAX_SIZE = 2
        pcp = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        pcp.store('key1', 'value1', int(time.time()) + 60)
        pcp.store('key2', 'value2', int(time.time()) + 60)
        assert len(pcp.cache) == 2
        pcp.store('key1', 'value2', int(time.time()) + 60)
        # Ensure that the override key gets added to the end of the cache instead of in the same position
        pcp.store('key3', 'value3', int(time.time()) + 60)
        assert pcp.get('key1') == 'value2'

    def test_store_ignore_expired_existing_key(self, pcp):
        pcp.store('key1', 'value2', int(time.time()) + 60)
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value2'
        assert len(pcp.cache) == 1

    def test_get_key_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1
        pcp.cache['key1'] = ('value1', int(time.time()) - 1)
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_lru_eviction(self, ie, logger):
        MAX_SIZE = 2
        provider = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        provider.store('key1', 'value1', int(time.time()) + 5)
        provider.store('key2', 'value2', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') == 'value1'

        provider.store('key3', 'value3', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key2') is None

        provider.store('key4', 'value4', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') is None
        assert provider.get('key3') == 'value3'
        assert provider.get('key4') == 'value4'
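# A self-contained sketch (not yt-dlp's implementation) of the OrderedDict
# LRU pattern the eviction test above exercises: reads refresh recency via
# move_to_end(), and writes evict the oldest entry once max_size is exceeded.
from collections import OrderedDict

class TinyLRU:
    def __init__(self, max_size):
        self.max_size, self.data = max_size, OrderedDict()

    def get(self, key):
        if key in self.data:
            self.data.move_to_end(key)  # mark as most recently used
            return self.data[key]
        return None

    def store(self, key, value):
        self.data[key] = value
        self.data.move_to_end(key)
        if len(self.data) > self.max_size:
            self.data.popitem(last=False)  # evict the least recently used

lru = TinyLRU(2)
lru.store('key1', 'value1')
lru.store('key2', 'value2')
lru.get('key1')                 # refreshes key1
lru.store('key3', 'value3')
assert lru.get('key2') is None  # key2 evicted, as in test_lru_eviction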
    def test_delete(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 5)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value1'
        pcp.delete('key1')
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None

    def test_use_global_cache_default(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is _pot_memory_cache.value['lock']

        pcp2 = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == pcp2.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is pcp2.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is pcp2.lock is _pot_memory_cache.value['lock']

    def test_fail_max_size_change_global(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        with pytest.raises(ValueError, match='Cannot change max_size of initialized global memory cache'):
            initialize_global_cache(50)

        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25

    def test_memory_lru_preference(self, pcp, ie, pot_request):
        assert memorylru_preference(pcp, pot_request) == 10000
test/test_pot/test_pot_builtin_utils.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import pytest
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenContext,
)

from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding, ContentBindingType


class TestGetWebPoContentBinding:

    @pytest.mark.parametrize('client_name, context, is_authenticated, expected', [
        *[(client, context, is_authenticated, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, expected in [
                (PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
                (PoTokenContext.PLAYER, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.SUBS, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.GVS, True, ('example-data-sync-id', ContentBindingType.DATASYNC_ID)),
            ]],
        ('WEB_REMIX', PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('WEB_REMIX', PoTokenContext.PLAYER, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('ANDROID', PoTokenContext.GVS, False, (None, None)),
        ('IOS', PoTokenContext.GVS, False, (None, None)),
    ])
    def test_get_webpo_content_binding(self, pot_request, client_name, context, is_authenticated, expected):
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert get_webpo_content_binding(pot_request) == expected

    def test_extract_visitor_id(self, pot_request):
        pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == ('123abcXYZ_-', ContentBindingType.VISITOR_ID)

    def test_invalid_visitor_id(self, pot_request):
        # visitor id not alphanumeric (i.e. protobuf extraction failed)
        pot_request.visitor_data = 'CggxMjM0NTY3OCiA4s-qBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_no_visitor_id(self, pot_request):
        pot_request.visitor_data = 'KIDiz6oG'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_invalid_base64(self, pot_request):
        pot_request.visitor_data = 'invalid-base64'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_gvs_video_id_binding_experiment(self, pot_request):
        pot_request.context = PoTokenContext.GVS
        pot_request._gvs_bind_to_video_id = True
        assert get_webpo_content_binding(pot_request) == ('example-video-id', ContentBindingType.VIDEO_ID)
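# A rough sketch (not yt-dlp's implementation) of the decoding that
# test_extract_visitor_id relies on: visitor_data is a URL-quoted,
# base64url-encoded protobuf whose first field (tag 0x0a, length-delimited)
# carries the visitor id. The single-byte length is an assumption.
import base64
import urllib.parse

def extract_visitor_id(visitor_data):
    raw = base64.urlsafe_b64decode(urllib.parse.unquote(visitor_data))
    if raw[:1] != b'\x0a':  # protobuf field 1, wire type 2 (length-delimited)
        return None
    length = raw[1]         # assumes a single-byte varint length
    return raw[2:2 + length].decode()

assert extract_visitor_id('CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D') == '123abcXYZ_-'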
test/test_pot/test_pot_builtin_webpospec.py (new file, 92 lines)
@@ -0,0 +1,92 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.extractor.youtube.pot.cache import CacheProviderWritePolicy
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenRequest,
    PoTokenContext,
)
from yt_dlp.version import __version__

from yt_dlp.extractor.youtube.pot._builtin.webpo_cachespec import WebPoPCSP
from yt_dlp.extractor.youtube.pot._registry import _pot_pcs_providers


@pytest.fixture()
def pot_request(pot_request) -> PoTokenRequest:
    pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'  # visitor_id=123abcXYZ_-
    return pot_request


class TestWebPoPCSP:
    def test_base_type(self):
        assert issubclass(WebPoPCSP, IEContentProvider)
        assert issubclass(WebPoPCSP, BuiltinIEContentProvider)

    def test_init(self, ie, logger):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.PROVIDER_NAME == 'webpo'
        assert pcs.PROVIDER_VERSION == __version__
        assert pcs.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcs.is_available()

    def test_is_registered(self):
        assert _pot_pcs_providers.value.get('WebPo') == WebPoPCSP

    @pytest.mark.parametrize('client_name, context, is_authenticated', [
        ('ANDROID', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.PLAYER, False),
    ])
    def test_not_supports(self, ie, logger, pot_request, client_name, context, is_authenticated):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert pcs.generate_cache_spec(pot_request) is None

    @pytest.mark.parametrize('client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected', [
        *[(client, context, is_authenticated, remote_host, source_address, request_proxy, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, remote_host, source_address, request_proxy, expected in [
                (PoTokenContext.GVS, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
                (PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'video_id'}),
                (PoTokenContext.GVS, True, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': 'example-data-sync-id', 'cbt': 'datasync_id'}),
            ]],
        ('WEB_REMIX', PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
        ('WEB', PoTokenContext.GVS, False, None, None, None, {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id', 'ip': None, 'sa': None, 'px': None}),
        ('TVHTML5', PoTokenContext.PLAYER, False, None, None, 'http://example.com', {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'video_id', 'ip': None, 'sa': None, 'px': 'http://example.com'}),
    ])
    def test_generate_key_bindings(self, ie, logger, pot_request, client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        pot_request.innertube_context['client']['remoteHost'] = remote_host
        pot_request.request_source_address = source_address
        pot_request.request_proxy = request_proxy
        pot_request.video_id = '123abcXYZ_-'  # same as visitor id to test type

        assert pcs.generate_cache_spec(pot_request).key_bindings == expected

    def test_no_bind_visitor_id(self, ie, logger, pot_request):
        # Should not bind to visitor id if setting is set to False
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={'bind_to_visitor_id': ['false']})
        pot_request.innertube_context['client']['clientName'] = 'WEB'
        pot_request.context = PoTokenContext.GVS
        pot_request.is_authenticated = False
        assert pcs.generate_cache_spec(pot_request).key_bindings == {'t': 'webpo', 'ip': None, 'sa': None, 'px': None, 'cb': 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D', 'cbt': 'visitor_data'}

    def test_default_ttl(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.generate_cache_spec(pot_request).default_ttl == 6 * 60 * 60  # should default to 6 hours

    def test_write_policy(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.context = PoTokenContext.GVS
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL
        pot_request.context = PoTokenContext.PLAYER
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST
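# A hedged sketch (not yt-dlp's code) of how key_bindings like those asserted
# above can be flattened into one deterministic cache key; the actual key
# derivation used by the cache layer is not shown in this diff.
import hashlib
import json

def cache_key(key_bindings):
    # sort_keys makes the serialization order-independent and stable
    canonical = json.dumps(key_bindings, sort_keys=True)
    return hashlib.sha256(canonical.encode()).hexdigest()

key = cache_key({'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id',
                 'ip': None, 'sa': None, 'px': None})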
Some files were not shown because too many files have changed in this diff.