Mirror of https://github.com/yt-dlp/yt-dlp
Synced 2025-12-18 23:25:42 +07:00

Compare commits: 2022.08.14 ... 2023.03.03 (593 commits)
Commit SHAs in this range (the author, date, and message columns were not captured by the mirror):

354d5fca7a, 9344964281, bfc861a91e, fe2ce85aff, d21056f4cf, b2e0343ba0, 4815bbfc41, 776d1c3f0c, 12647e03d4, 77df20f14c,
29cb20bd56, d400e261cf, 9acf1ee25f, 40d77d8902, 2d5a8c5db2, 77d6d13646, 9fddc12ab0, b38cae49e6, 7f51861b18, 5b28cef72d,
31e183557f, f34804b2f9, 65f6e80780, b059188383, 5038f6d713, 4d248e29d2, 8e9fe43cd3, 43a3eaf963, cc09083636, da8e2912b1,
18d295c9e0, 17ca19ab60, 41bd0dc4d7, a0a7c01542, 45b2ee6f4f, a538772969, 30031be974, 9acca71237, d50ea3ce5a, c61cf091a5,
f737fb16d8, 5e1a54f63e, 31c279a2a2, a4ad59ff2d, b25d6cb963, 3616300155, e4a8b1769e, da880559a6, 65e5c021e7, a9189510ba,
10fd9e6ee8, 72671a212d, 376aa24b15, c9d14bd22a, 149eb0bbf3, 9ebac35577, 8b37c58f8b, d3bb187f01, 44699d10dc, a9c685453f,
c154302c58, 5712943b76, 39f32f1715, 365b900605, c6b657867a, a4f1683221, b6795fd310, 2e269bd998, 78a78fa74d, 0ba87dd279,
05799a48c7, 93abb7406b, b23167e754, 417cdaae08, b3eaab7ca2, a31d0fa6c3, cc2389c8ac, 20266508dd, cc13293c28, 989f47b631,
7d5f919bad, c62e64cf01, c085cc2def, 7708df8da0, b85faf6ffb, 203a06f855, 6839ae1f6d, c0cd13fb1c, f14c233348, 768a001781,
acb1042a9f, f40e32fb1a, e61acb40b2, 7e68567e50, f7efe6dc95, b1bde57bef, 88426d9446, f6a765ceb5, 754c84e2e4, 7aefd19afe,
fbbb5508ea, c77df98b1a, d27bde9883, 0fe87a8730, 3b161265ad, 389896df85, b032ff0f03, dad2210c0c, 9cfdbcbf3f, 7543c9c99b,
acacb57c7e, 776995bc10, 8b008d6254, 83c4970e52, 8aa0bd5d10, 37e325b92f, 59d7de0da5, 88d8928bf7, 176a068cde, 5ab3534d44,
cb73b8460c, 7481998b16, 87ebab0615, 355d781bed, 7287ab92f6, 6becd2508c, edfc7725b1, b382c1fc6a, 8a6b167723, 253ac4ba6a,
84e0e33a19, ab4cbeff00, 773c272d66, c3366fdfd0, 5be214abed, d37422f1db, 933ed882e9, a1d9aca338, 91d54e9b99, 76c3ceccfb,
ad68b16a1e, f079514957, e9df3d42c4, d80ca5deaa, 1a3cd8ec35, 990dd7b00f, d83b0ad809, 08e29b9f1f, 8e174ba7de, 05997b6e98,
32a84bcf4e, 8300774c4a, d7f9871469, 13f930abc0, b23b503e22, e756f45ba0, 8c53322cda, 193fb150b7, 26fdfc3704, 78d25e0b7c,
2a06bb4eb6, 88fb942577, 1cdda32998, 3e01ce744a, 8e40b9d1ec, 2fb0f85868, a0e526ed4d, 8d1ddb0805, 9bb856998b, fbb7383306,
ec54bd43f3, f74371a97d, d5f043d127, fe74d5b592, 119e40ef64, 4455918e7f, efa944f4bc, e107c2b8cf, ca2f6e14e6, c1edb853b0,
2647c933b8, 53006b35ea, 4b183d4962, 3d667e0047, 9a9006ba20, 153e88a751, 9fcd8ad1f2, 6b71d186dd, 074b2fae90, 06a9d68eb8,
a4d6ead30f, d1b5f3d79c, da8d2de208, 15e9e578c0, 0ef3d47027, 247c8dd4f5, 032f22020c, 4af47a0003, 9012d20b23, d61ef7f343,
1c226ccdd4, 8791e78ccc, 69f5fe45b9, 0b5546c723, 1fc089143c, 5424dbaf91, c733555106, 81388c0954, df10bad267, f0f3fa028b,
22697a84f6, 3ac5476430, e318b5b87a, f549b18512, 7c5e1701f6, 16bed382fd, 3cf50fa8e9, f69b0554eb, e74a3c6dcc, 7108221662,
10dc85924a, b05f0a50e0, 3d79ebc8b7, b44cd29851, 85a802969e, 72f96c5566, 839e2a62ae, 28b8f57b4b, dfc186d422, 42ec478fc4,
7991ae57a8, 935bac1e4d, c4cbd3bebd, c53a18f016, 71df9b7fd5, c9f5ce5118, ddf1e22d48, 0e96b408b9, ba72399723, 9bcfe33be7,
71eb82d1b2, a9d069f5b8, 48652590ec, 86f557b636, c0caa80515, 0d95d8b00a, 9d52bf65ff, d761dfd059, 27c0f899c8, 7ff2fafe47,
3b021eacef, f352a09778, 02b2f9fa7d, 29ca408219, 8486540257, ed027fd9d8, 352e7d9873, 9a0416c6a5, f5a9e9df0d, f96a3fb7d3,
bc87dac75f, 9f14daf22b, 784320c98c, d0d74b7197, 64c464a144, 4de88a6a36, 105bfd90f5, 6368e2e639, a4894d3e25, d7b460d0e5,
171a31dbe8, 83cc7b8aae, 0a4b2f4180, a8c754cc00, bc5c2f8a2c, d965856235, 08270da5c3, 5e39fb982e, 8b644025b1, 7aaf4cd2a8,
8522226d2f, f4b2c59cfe, 7c8c63529e, e4221b700f, bd7e919a75, f7fc8d39e9, a6858cda29, 17fc3dc48a, 3f5c216969, e72e48c53f,
0cf643b234, dc3028d233, 4dc23a8051, 495322b95b, c789fb7787, ed6bec168d, 0d8affc17f, d9df9b4919, efdc45a6ea, 86973308cd,
c61473c1d6, 8fddc232bf, fad689c7b6, db6fa6960c, 3b87f4d943, 581e86b512, 8196182a12, 9b383177c9, fbb0ee7747, c7e4ab278a,
e9ce4e9250, 5da08bde9e, ff48fc04d0, 46d09f8707, db4678e448, a349d4d641, ac8e69dd32, 96b9e9cf62, cb1553e966, 0d2a0ecac3,
c94df4d19d, 728f4b5c2e, 8c188d5d09, e14ea7fbd9, 7053aa3a48, 049565df2e, cc1d3bf96b, 5b9f253fa0, d715b0e413, 6141346d18,
59a0c35865, da9a60ca0d, 0d113603ac, 2e30b46fe4, 68a9a450d4, ed13a772d7, 78545664bf, f72218c199, 58fb927ebd, 62b8dac490,
682b4524bf, 9da6612b0f, e63faa101c, 497074f044, c90c5b9bdd, ad97487606, e091fb92da, c9bd65185c, c66ed4e2e5, 2530b68d44,
7d61d2306e, 385adffcf5, 0c908911f9, c13a301a94, f47cf86eff, 7a26ce2641, 3639df54c3, a4713ba96d, 5318156f1c, d5d1df8afd,
cd5df121f3, 73ac0e6b85, a7ddbc0475, 8fab23301c, 1338ae3ba3, 63c547d71c, 814bba3933, 2576d53a31, 217753f4aa, 42a44f01c3,
9b9dad119a, 6dca2aa66d, 6678a4f0b3, d51b2816e3, 34f00179db, 5225df50cf, 94dc8604dd, a71b812f53, c6989aa3ae, a79bf78397,
82fb2357d9, 13b2ae29c2, 36069409ec, 0468a3b325, d509c1f5a3, 2c98d99818, 226c0f3a54, ade1fa70cb, 4c9a1a3ba5, 1d55ebabc9,
f324fe8c59, 866f037344, 5d14b73491, 540236ce11, 7b0127e1e1, f99bbfc983, 3b55aaac59, 2e565f5bca, e02e6d86db, 867c66ff97,
f03940963e, 09c127ff83, aebb4f4ba7, bf2e1ec67a, 98d4ec1ef2, 1305b659ef, 57fb88093e, 4e0511f27d, 304ad45a9b, 878eac3e2e,
34859e4b32, 143a2ccab3, 1e0daeb314, 7f5b3cb8b3, c53e5cf59f, c7f540ea1e, 12f153a827, 0d887f273a, 4d37720a0c, dd4411aac2,
1d77d8ce07, a057779d5e, 7474e4531e, d3a3d7f0cc, 8671f995cc, 4a61501db9, 7244895bde, 177662e0f2, f48ab881f6, eb2d9504b9,
8a04054647, 8b7fb8b60d, a83333c432, 573a98d6f0, af7a5eef2f, 576faf00b2, 81b6102d20, acf306d1f9, 20a7304e4c, 2e0f8d4f6e,
7e378287c4, 9cc5aed990, 48f535f5f8, 8dbad2a439, 11398b922c, dfea94f8f6, f1aae71568, a5642f2c4a, 10e2eb4f81, c9eba8075f,
9d69c4e4b4, 292fdad297, c04cc2e28e, 7a32c70d13, 709ee21417, 1fb53b946c, 1dd18a8808, 0a5095fe8d, 0f60ba6e65, 1534aba865,
0ca0f88121, 0500ee3d81, 46a5b335e7, 914491b8e0, ab029d7e92, 0bd5a039ea, 5c8b2ee9ec, faf7863bb0, d42763a443, 3c757d5ed2,
f55523cfdd, 32972518da, 2e7675489f, 80eb0bd9b9, 4cca2eb1bf, 1c09783f7a, 163281178a, 2fa669f759, 8ca48a1a54, b27bc13af6,
f7c5a5e967, fada8272b6, 46d72cd2c7, 19b4e59a1e, dab284f80f, 9665f15a96, 2b24afa6d7, 3166e6840c, 8817a80d3a, 5736d79172,
fc2ba496fd, 2b9d02167f, 2314b4d89f, 1060f82f89, 22df97f9c5, 9c935fbc72, deae7c1711, 941e881e1f, 0cb0fdbbfe, 0831d95c46,
c26f9b991a, 0c0b78b273, 3ffb2f5bea, ae1035646a, 1015ceeeaf, 17ffed1842, be9c0884d7, 48c8424bd9, 7657ec7ed6, 07a1250e0e,
69082b38dc, aa824dd10b, a12d03e15d, 1a7c9fad9f, 3c7a276234, d6f8871964, 5469a4ab11, 2c475e48b5, 7c6eb424d3, adba24d207,
5d7c7d6569, d2c8aadf79, 1ac7f46184, 05deb747bb, b505e8517a, f2e9fa3ef7, 50a399326f, 1ff88b7aec, 825d3ce386, 92aa6d6883,
b2a4db425b, de49cdbe9d, 9f9c85dda4, 11734714c2, b86ca447ce, f8c7ba9984, 76f2bb175d, f26af78a8a, bfbecd1174, 9bd13fe5bb,
459262ac97, 82ea226c61, da4db748fa, e1eabd7beb, d81ba7d491, 5135ed3d4a, c4b2df872d, 224b5a35f7, 50ac0e5416, e0992d5558,
5e01315aa1, 4e4982ab5b, 89e4d86171, a1af516259, 1d64a59547, ca7f8b8f31, 164b03c486, e5458d1d88, b5e7a2e69d, 2516cafb28,
fd404bec7e, fe7866d0ed, 5314b52192, 13db4e7b9e, 07275b708b, b85703d11a, 992dc6b486, 822d66e591, 8d1ad6378f, 2d1019542a,
b25cac650f, 90a1df305b, 0a6b4b82e9, 1704c47ba8, b76e9cedb3, 48c88e088c, a831c2ea90, be13a6e525, 8a3da4c68c, 4d37d4a77c,
7d3b98be4c, 2b3e43e247, f60ef66371, 25836db6be, 587021cd9f, 580ce00782, 2f1a299c50, f6ca640b12, 3ce2933693, c200096c03,
6d3e7424bf, 5c6d2ef9d1, 460eb9c50e
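For reference, the same commit range can be regenerated from a local clone with `git rev-list`; a minimal sketch, assuming the `2022.08.14` and `2023.03.03` tags are present locally:

```python
import subprocess

def commits_between(repo: str, old_tag: str, new_tag: str) -> list[str]:
    """Abbreviated SHAs reachable from new_tag but not from old_tag."""
    result = subprocess.run(
        ["git", "-C", repo, "rev-list", "--abbrev-commit", f"{old_tag}..{new_tag}"],
        capture_output=True, text=True, check=True,
    )
    return result.stdout.split()

# e.g. shas = commits_between("yt-dlp", "2022.08.14", "2023.03.03")
```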
.github/ISSUE_TEMPLATE/1_broken_site.yml (vendored, 20 changes)

```diff
@@ -1,5 +1,5 @@
 name: Broken site
-description: Report broken or misfunctioning site
+description: Report error in a supported site
 labels: [triage, site-bug]
 body:
   - type: checkboxes
@@ -7,7 +7,7 @@ body:
       label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
       description: Fill all fields even if you think it is irrelevant for the issue
       options:
-        - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
          required: true
   - type: checkboxes
     id: checklist
@@ -16,15 +16,15 @@ body:
       description: |
         Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
       options:
-        - label: I'm reporting a broken site
+        - label: I'm reporting that a **supported** site is broken
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.08.14** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.03.03** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -50,6 +50,8 @@ body:
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
         - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
           required: true
   - type: textarea
@@ -62,7 +64,7 @@ body:
         [debug] Command-line config: ['-vU', 'test:youtube']
         [debug] Portable config "yt-dlp.conf": ['-i']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2022.08.14 [9d339c4] (win32_exe)
+        [debug] yt-dlp version 2023.03.03 [9d339c4] (win32_exe)
         [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
         [debug] Checking exe version: ffmpeg -bsfs
         [debug] Checking exe version: ffprobe -bsfs
@@ -70,8 +72,8 @@ body:
         [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2022.08.14, Current version: 2022.08.14
-        yt-dlp is up to date (2022.08.14)
+        Latest version: 2023.03.03, Current version: 2023.03.03
+        yt-dlp is up to date (2023.03.03)
         <more lines>
       render: shell
     validations:
```
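The new optional checkbox above points API users at the `'verbose': True` parameter. For anyone embedding yt-dlp rather than using the CLI, a minimal sketch of what that looks like (the URL is a placeholder):

```python
import yt_dlp

# 'verbose': True makes YoutubeDL print the same [debug] lines the
# issue template asks for, so the output can be pasted into a report.
params = {"verbose": True}

with yt_dlp.YoutubeDL(params) as ydl:
    ydl.download(["https://example.com/some-video"])  # placeholder URL
```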
.github/ISSUE_TEMPLATE/2_site_support_request.yml (vendored)

```diff
@@ -7,7 +7,7 @@ body:
       label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
       description: Fill all fields even if you think it is irrelevant for the issue
       options:
-        - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
          required: true
   - type: checkboxes
     id: checklist
@@ -18,13 +18,13 @@ body:
       options:
         - label: I'm reporting a new site support request
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.08.14** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.03.03** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -62,6 +62,8 @@ body:
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
         - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
           required: true
   - type: textarea
@@ -74,7 +76,7 @@ body:
         [debug] Command-line config: ['-vU', 'test:youtube']
         [debug] Portable config "yt-dlp.conf": ['-i']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2022.08.14 [9d339c4] (win32_exe)
+        [debug] yt-dlp version 2023.03.03 [9d339c4] (win32_exe)
         [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
         [debug] Checking exe version: ffmpeg -bsfs
         [debug] Checking exe version: ffprobe -bsfs
@@ -82,8 +84,8 @@ body:
         [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2022.08.14, Current version: 2022.08.14
-        yt-dlp is up to date (2022.08.14)
+        Latest version: 2023.03.03, Current version: 2023.03.03
+        yt-dlp is up to date (2023.03.03)
         <more lines>
       render: shell
     validations:
```
.github/ISSUE_TEMPLATE/3_site_feature_request.yml (vendored)

```diff
@@ -7,7 +7,7 @@ body:
       label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
       description: Fill all fields even if you think it is irrelevant for the issue
       options:
-        - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
          required: true
   - type: checkboxes
     id: checklist
@@ -18,11 +18,11 @@ body:
       options:
         - label: I'm requesting a site-specific feature
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.08.14** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.03.03** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -58,6 +58,8 @@ body:
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
         - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
           required: true
   - type: textarea
@@ -70,7 +72,7 @@ body:
         [debug] Command-line config: ['-vU', 'test:youtube']
         [debug] Portable config "yt-dlp.conf": ['-i']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2022.08.14 [9d339c4] (win32_exe)
+        [debug] yt-dlp version 2023.03.03 [9d339c4] (win32_exe)
         [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
         [debug] Checking exe version: ffmpeg -bsfs
         [debug] Checking exe version: ffprobe -bsfs
@@ -78,8 +80,8 @@ body:
         [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2022.08.14, Current version: 2022.08.14
-        yt-dlp is up to date (2022.08.14)
+        Latest version: 2023.03.03, Current version: 2023.03.03
+        yt-dlp is up to date (2023.03.03)
         <more lines>
       render: shell
     validations:
```
.github/ISSUE_TEMPLATE/4_bug_report.yml (vendored, 16 changes)

```diff
@@ -7,7 +7,7 @@ body:
       label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
       description: Fill all fields even if you think it is irrelevant for the issue
       options:
-        - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
          required: true
   - type: checkboxes
     id: checklist
@@ -18,13 +18,13 @@ body:
       options:
         - label: I'm reporting a bug unrelated to a specific site
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.08.14** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.03.03** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -43,6 +43,8 @@ body:
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
         - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
           required: true
   - type: textarea
@@ -55,7 +57,7 @@ body:
         [debug] Command-line config: ['-vU', 'test:youtube']
         [debug] Portable config "yt-dlp.conf": ['-i']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2022.08.14 [9d339c4] (win32_exe)
+        [debug] yt-dlp version 2023.03.03 [9d339c4] (win32_exe)
         [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
         [debug] Checking exe version: ffmpeg -bsfs
         [debug] Checking exe version: ffprobe -bsfs
@@ -63,8 +65,8 @@ body:
         [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2022.08.14, Current version: 2022.08.14
-        yt-dlp is up to date (2022.08.14)
+        Latest version: 2023.03.03, Current version: 2023.03.03
+        yt-dlp is up to date (2023.03.03)
         <more lines>
       render: shell
     validations:
```
.github/ISSUE_TEMPLATE/5_feature_request.yml (vendored, 14 changes)

```diff
@@ -7,7 +7,7 @@ body:
       label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
       description: Fill all fields even if you think it is irrelevant for the issue
       options:
-        - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
          required: true
   - type: checkboxes
     id: checklist
@@ -20,9 +20,9 @@ body:
           required: true
         - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.08.14** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.03.03** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -40,6 +40,8 @@ body:
       label: Provide verbose output that clearly demonstrates the problem
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
         - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
   - type: textarea
     id: log
@@ -51,7 +53,7 @@ body:
         [debug] Command-line config: ['-vU', 'test:youtube']
         [debug] Portable config "yt-dlp.conf": ['-i']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2022.08.14 [9d339c4] (win32_exe)
+        [debug] yt-dlp version 2023.03.03 [9d339c4] (win32_exe)
         [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
         [debug] Checking exe version: ffmpeg -bsfs
         [debug] Checking exe version: ffprobe -bsfs
@@ -59,7 +61,7 @@ body:
         [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2022.08.14, Current version: 2022.08.14
-        yt-dlp is up to date (2022.08.14)
+        Latest version: 2023.03.03, Current version: 2023.03.03
+        yt-dlp is up to date (2023.03.03)
         <more lines>
       render: shell
```
.github/ISSUE_TEMPLATE/6_question.yml (vendored, 14 changes)

```diff
@@ -7,7 +7,7 @@ body:
       label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
       description: Fill all fields even if you think it is irrelevant for the issue
       options:
-        - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
          required: true
   - type: markdown
     attributes:
@@ -26,9 +26,9 @@ body:
           required: true
         - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.08.14** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.03.03** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -46,6 +46,8 @@ body:
       label: Provide verbose output that clearly demonstrates the problem
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
         - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
   - type: textarea
     id: log
@@ -57,7 +59,7 @@ body:
         [debug] Command-line config: ['-vU', 'test:youtube']
         [debug] Portable config "yt-dlp.conf": ['-i']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2022.08.14 [9d339c4] (win32_exe)
+        [debug] yt-dlp version 2023.03.03 [9d339c4] (win32_exe)
         [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
         [debug] Checking exe version: ffmpeg -bsfs
         [debug] Checking exe version: ffprobe -bsfs
@@ -65,7 +67,7 @@ body:
         [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2022.08.14, Current version: 2022.08.14
-        yt-dlp is up to date (2022.08.14)
+        Latest version: 2023.03.03, Current version: 2023.03.03
+        yt-dlp is up to date (2023.03.03)
         <more lines>
       render: shell
```
.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml (vendored)

```diff
@@ -1,5 +1,5 @@
 name: Broken site
-description: Report broken or misfunctioning site
+description: Report error in a supported site
 labels: [triage, site-bug]
 body:
   %(no_skip)s
@@ -10,15 +10,15 @@ body:
       description: |
         Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
       options:
-        - label: I'm reporting a broken site
+        - label: I'm reporting that a **supported** site is broken
           required: true
         - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
```
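The `%(version)s` and `%(no_skip)s` tokens in these `_tmpl` files are Python %-style placeholders; the shipped issue templates appear to be generated from them by substituting real values (yt-dlp has dev scripts for this). A minimal sketch of that substitution step, with hypothetical field values:

```python
# Hypothetical values standing in for what yt-dlp's release tooling supplies.
fields = {
    "version": "2023.03.03",
    "no_skip": "- type: checkboxes  # hypothetical block injected here",
}

with open(".github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml") as f:
    template = f.read()

rendered = template % fields  # %-formatting fills %(version)s and %(no_skip)s

with open(".github/ISSUE_TEMPLATE/1_broken_site.yml", "w") as f:
    f.write(rendered)
```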
.github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml (vendored)

```diff
@@ -16,9 +16,9 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
```
.github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml (vendored)

```diff
@@ -16,7 +16,7 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
```
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (vendored, 4 changes)

```diff
@@ -16,9 +16,9 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
```
.github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml (vendored)

```diff
@@ -16,7 +16,7 @@ body:
           required: true
         - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
```
.github/ISSUE_TEMPLATE_tmpl/6_question.yml (vendored, 2 changes)

```diff
@@ -22,7 +22,7 @@ body:
           required: true
         - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
```
.github/PULL_REQUEST_TEMPLATE.md (vendored, 6 changes)

```diff
@@ -2,8 +2,6 @@
 
 ### Description of your *pull request* and other information
 
-</details>
-
 <!--
 
 Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible
@@ -32,7 +30,7 @@ ### Before submitting a *pull request* make sure you have:
 - [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests
 - [ ] Checked the code with [flake8](https://pypi.python.org/pypi/flake8) and [ran relevant tests](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions)
 
-### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check one of the following options:
+### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply:
 - [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
 - [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
 
@@ -41,3 +39,5 @@ ### What is the purpose of your *pull request*?
 - [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy))
 - [ ] Core bug fix/improvement
 - [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes))
+
+</details>
```
548  .github/workflows/build.yml  (vendored)
@@ -1,324 +1,356 @@
-name: Build
-on: workflow_dispatch
+name: Build Artifacts
+on:
+  workflow_call:
+    inputs:
+      version:
+        required: true
+        type: string
+      channel:
+        required: false
+        default: stable
+        type: string
+      unix:
+        default: true
+        type: boolean
+      linux_arm:
+        default: true
+        type: boolean
+      macos:
+        default: true
+        type: boolean
+      macos_legacy:
+        default: true
+        type: boolean
+      windows:
+        default: true
+        type: boolean
+      windows32:
+        default: true
+        type: boolean
+      meta_files:
+        default: true
+        type: boolean
+    secrets:
+      GPG_SIGNING_KEY:
+        required: false
+
+  workflow_dispatch:
+    inputs:
+      version:
+        description: Version tag (YYYY.MM.DD[.REV])
+        required: true
+        type: string
+      channel:
+        description: Update channel (stable/nightly)
+        required: true
+        default: stable
+        type: string
+      unix:
+        description: yt-dlp, yt-dlp.tar.gz, yt-dlp_linux, yt-dlp_linux.zip
+        default: true
+        type: boolean
+      linux_arm:
+        description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
+        default: true
+        type: boolean
+      macos:
+        description: yt-dlp_macos, yt-dlp_macos.zip
+        default: true
+        type: boolean
+      macos_legacy:
+        description: yt-dlp_macos_legacy
+        default: true
+        type: boolean
+      windows:
+        description: yt-dlp.exe, yt-dlp_min.exe, yt-dlp_win.zip
+        default: true
+        type: boolean
+      windows32:
+        description: yt-dlp_x86.exe
+        default: true
+        type: boolean
+      meta_files:
+        description: SHA2-256SUMS, SHA2-512SUMS, _update_spec
+        default: true
+        type: boolean
+
+permissions:
+  contents: read
+
 jobs:
-  prepare:
+  unix:
+    if: inputs.unix
     runs-on: ubuntu-latest
-    outputs:
-      version_suffix: ${{ steps.version_suffix.outputs.version_suffix }}
-      ytdlp_version: ${{ steps.bump_version.outputs.ytdlp_version }}
-      head_sha: ${{ steps.push_release.outputs.head_sha }}
     steps:
       - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
       - uses: actions/setup-python@v4
         with:
-          python-version: '3.10'
+          python-version: "3.10"
+      - uses: conda-incubator/setup-miniconda@v2
+        with:
+          miniforge-variant: Mambaforge
+          use-mamba: true
+          channels: conda-forge
+          auto-update-conda: true
+          activate-environment: ""
+          auto-activate-base: false
+      - name: Install Requirements
+        run: |
+          sudo apt-get -y install zip pandoc man sed
+          python -m pip install -U pip setuptools wheel
+          python -m pip install -U Pyinstaller -r requirements.txt
+          reqs=$(mktemp)
+          cat > $reqs << EOF
+          python=3.10.*
+          pyinstaller
+          cffi
+          brotli-python
+          EOF
+          sed '/^brotli.*/d' requirements.txt >> $reqs
+          mamba create -n build --file $reqs
+
+      - name: Prepare
+        run: |
+          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
-      - name: Set version suffix
-        id: version_suffix
-        env:
-          PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
-        if: "env.PUSH_VERSION_COMMIT == ''"
-        run: echo ::set-output name=version_suffix::$(date -u +"%H%M%S")
-      - name: Bump version
-        id: bump_version
-        run: |
-          python devscripts/update-version.py ${{ steps.version_suffix.outputs.version_suffix }}
-          make issuetemplates
-
-      - name: Push to release
-        id: push_release
-        run: |
-          git config --global user.name github-actions
-          git config --global user.email github-actions@example.com
-          git add -u
-          git commit -m "[version] update" -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
-          git push origin --force ${{ github.event.ref }}:release
-          echo ::set-output name=head_sha::$(git rev-parse HEAD)
-      - name: Update master
-        env:
-          PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
-        if: "env.PUSH_VERSION_COMMIT != ''"
-        run: git push origin ${{ github.event.ref }}
-
-
-  build_unix:
-    needs: prepare
-    runs-on: ubuntu-18.04  # Standalone executable should be built on minimum supported OS
-
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
-        with:
-          python-version: '3.10'
-      - name: Install Requirements
-        run: |
-          sudo apt-get -y install zip pandoc man
-          python -m pip install --upgrade pip setuptools wheel twine
-          python -m pip install Pyinstaller -r requirements.txt
-
-      - name: Prepare
-        run: |
-          python devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
           python devscripts/make_lazy_extractors.py
-      - name: Build Unix executables
+      - name: Build Unix platform-independent binary
         run: |
           make all tar
+      - name: Build Unix standalone binary
+        shell: bash -l {0}
+        run: |
+          unset LD_LIBRARY_PATH  # Harmful; set by setup-python
+          conda activate build
           python pyinst.py --onedir
           (cd ./dist/yt-dlp_linux && zip -r ../yt-dlp_linux.zip .)
           python pyinst.py
-      - name: Get SHA2-SUMS
-        id: get_sha
-        run: |
+          mv ./dist/yt-dlp_linux ./yt-dlp_linux
+          mv ./dist/yt-dlp_linux.zip ./yt-dlp_linux.zip
 
       - name: Upload artifacts
         uses: actions/upload-artifact@v3
         with:
           path: |
             yt-dlp
             yt-dlp.tar.gz
-            dist/yt-dlp_linux
-            dist/yt-dlp_linux.zip
+            yt-dlp_linux
+            yt-dlp_linux.zip
 
-      - name: Build and publish on PyPi
-        env:
-          TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
-        if: "env.TWINE_PASSWORD != ''"
-        run: |
-          rm -rf dist/*
-          python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
-          python setup.py sdist bdist_wheel
-          twine upload dist/*
-
-      - name: Install SSH private key for Homebrew
-        env:
-          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
-        if: "env.BREW_TOKEN != ''"
-        uses: yt-dlp/ssh-agent@v0.5.3
-        with:
-          ssh-private-key: ${{ env.BREW_TOKEN }}
-      - name: Update Homebrew Formulae
-        env:
-          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
-        if: "env.BREW_TOKEN != ''"
-        run: |
-          git clone git@github.com:yt-dlp/homebrew-taps taps/
-          python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.prepare.outputs.ytdlp_version }}"
-          git -C taps/ config user.name github-actions
-          git -C taps/ config user.email github-actions@example.com
-          git -C taps/ commit -am 'yt-dlp: ${{ needs.prepare.outputs.ytdlp_version }}'
-          git -C taps/ push
-
-
-  build_macos:
-    runs-on: macos-11
-    needs: prepare
+  linux_arm:
+    if: inputs.linux_arm
+    permissions:
+      contents: read
+      packages: write  # for creating cache
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        architecture:
+          - armv7
+          - aarch64
 
     steps:
       - uses: actions/checkout@v3
-      # NB: In order to create a universal2 application, the version of python3 in /usr/bin has to be used
-      - name: Install Requirements
-        run: |
+        with:
+          path: ./repo
+      - name: Virtualized Install, Prepare & Build
+        uses: yt-dlp/run-on-arch-action@v2
+        with:
+          # Ref: https://github.com/uraimo/run-on-arch-action/issues/55
+          env: |
+            GITHUB_WORKFLOW: build
+          githubToken: ${{ github.token }}  # To cache image
+          arch: ${{ matrix.architecture }}
+          distro: ubuntu18.04  # Standalone executable should be built on minimum supported OS
+          dockerRunArgs: --volume "${PWD}/repo:/repo"
+          install: |  # Installing Python 3.10 from the Deadsnakes repo raises errors
+            apt update
+            apt -y install zlib1g-dev python3.8 python3.8-dev python3.8-distutils python3-pip
+            python3.8 -m pip install -U pip setuptools wheel
+            # Cannot access requirements.txt from the repo directory at this stage
+            python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi
+
+          run: |
+            cd repo
+            python3.8 -m pip install -U Pyinstaller -r requirements.txt  # Cached version may be out of date
+            python3.8 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
+            python3.8 devscripts/make_lazy_extractors.py
+            python3.8 pyinst.py
+
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          path: |  # run-on-arch-action designates armv7l as armv7
+            repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
+
+  macos:
+    if: inputs.macos
+    runs-on: macos-11
+
+    steps:
+      - uses: actions/checkout@v3
+      # NB: In order to create a universal2 application, the version of python3 in /usr/bin has to be used
+      - name: Install Requirements
+        run: |
           brew install coreutils
           /usr/bin/python3 -m pip install -U --user pip Pyinstaller -r requirements.txt
 
       - name: Prepare
         run: |
-          /usr/bin/python3 devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
+          /usr/bin/python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
           /usr/bin/python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
           /usr/bin/python3 pyinst.py --target-architecture universal2 --onedir
           (cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
           /usr/bin/python3 pyinst.py --target-architecture universal2
 
       - name: Upload artifacts
         uses: actions/upload-artifact@v3
         with:
           path: |
             dist/yt-dlp_macos
             dist/yt-dlp_macos.zip
 
-  build_macos_legacy:
+  macos_legacy:
+    if: inputs.macos_legacy
     runs-on: macos-latest
-    needs: prepare
 
     steps:
       - uses: actions/checkout@v3
      - name: Install Python
        # We need the official Python, because the GA ones only support newer macOS versions
         env:
           PYTHON_VERSION: 3.10.5
           MACOSX_DEPLOYMENT_TARGET: 10.9  # Used up by the Python build tools
         run: |
           # Hack to get the latest patch version. Uncomment if needed
           #brew install python@3.10
           #export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
           curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o "python.pkg"
           sudo installer -pkg python.pkg -target /
           python3 --version
       - name: Install Requirements
         run: |
           brew install coreutils
           python3 -m pip install -U --user pip Pyinstaller -r requirements.txt
 
       - name: Prepare
         run: |
-          python3 devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
+          python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
           python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
           python3 pyinst.py
           mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
 
       - name: Upload artifacts
         uses: actions/upload-artifact@v3
         with:
           path: |
             dist/yt-dlp_macos_legacy
 
-  build_windows:
+  windows:
+    if: inputs.windows
     runs-on: windows-latest
-    needs: prepare
 
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
         with:  # 3.8 is used for Win7 support
-          python-version: '3.8'
+          python-version: "3.8"
       - name: Install Requirements
         run: |  # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
-          python -m pip install --upgrade pip setuptools wheel py2exe
-          pip install "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.2-py3-none-any.whl" -r requirements.txt
+          python -m pip install -U pip setuptools wheel py2exe
+          pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt
 
       - name: Prepare
         run: |
-          python devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
+          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
           python devscripts/make_lazy_extractors.py
       - name: Build
         run: |
           python setup.py py2exe
           Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
           python pyinst.py
           python pyinst.py --onedir
           Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
 
       - name: Upload artifacts
         uses: actions/upload-artifact@v3
         with:
           path: |
             dist/yt-dlp.exe
             dist/yt-dlp_min.exe
             dist/yt-dlp_win.zip
 
-  build_windows32:
+  windows32:
+    if: inputs.windows32
     runs-on: windows-latest
-    needs: prepare
 
    steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
         with:  # 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
-          python-version: '3.7'
-          architecture: 'x86'
+          python-version: "3.7"
+          architecture: "x86"
       - name: Install Requirements
         run: |
-          python -m pip install --upgrade pip setuptools wheel
-          pip install "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.2-py3-none-any.whl" -r requirements.txt
+          python -m pip install -U pip setuptools wheel
+          pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt
 
       - name: Prepare
         run: |
-          python devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
+          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
           python devscripts/make_lazy_extractors.py
       - name: Build
         run: |
           python pyinst.py
 
       - name: Upload artifacts
         uses: actions/upload-artifact@v3
         with:
           path: |
             dist/yt-dlp_x86.exe
 
-  publish_release:
+  meta_files:
+    if: inputs.meta_files && always()
+    needs:
+      - unix
+      - linux_arm
+      - macos
+      - macos_legacy
+      - windows
+      - windows32
     runs-on: ubuntu-latest
-    needs: [prepare, build_unix, build_windows, build_windows32, build_macos, build_macos_legacy]
 
     steps:
-      - uses: actions/checkout@v3
       - uses: actions/download-artifact@v3
 
-      - name: Get Changelog
+      - name: Make SHA2-SUMS files
         run: |
-          changelog=$(grep -oPz '(?s)(?<=### ${{ needs.prepare.outputs.ytdlp_version }}\n{2}).+?(?=\n{2,3}###)' Changelog.md) || true
-          echo "changelog<<EOF" >> $GITHUB_ENV
-          echo "$changelog" >> $GITHUB_ENV
-          echo "EOF" >> $GITHUB_ENV
-      - name: Make Update spec
-        run: |
-          echo "# This file is used for regulating self-update" >> _update_spec
-          echo "lock 2022.07.18 .+ Python 3.6" >> _update_spec
-      - name: Make SHA2-SUMS files
-        run: |
-          sha256sum artifact/yt-dlp | awk '{print $1 " yt-dlp"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp.tar.gz | awk '{print $1 " yt-dlp.tar.gz"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp.exe | awk '{print $1 " yt-dlp.exe"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp_win.zip | awk '{print $1 " yt-dlp_win.zip"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp_min.exe | awk '{print $1 " yt-dlp_min.exe"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp_x86.exe | awk '{print $1 " yt-dlp_x86.exe"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp_macos | awk '{print $1 " yt-dlp_macos"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp_macos.zip | awk '{print $1 " yt-dlp_macos.zip"}' >> SHA2-256SUMS
-          sha256sum artifact/yt-dlp_macos_legacy | awk '{print $1 " yt-dlp_macos_legacy"}' >> SHA2-256SUMS
-          sha256sum artifact/dist/yt-dlp_linux | awk '{print $1 " yt-dlp_linux"}' >> SHA2-256SUMS
-          sha256sum artifact/dist/yt-dlp_linux.zip | awk '{print $1 " yt-dlp_linux.zip"}' >> SHA2-256SUMS
-          sha512sum artifact/yt-dlp | awk '{print $1 " yt-dlp"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp.tar.gz | awk '{print $1 " yt-dlp.tar.gz"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp.exe | awk '{print $1 " yt-dlp.exe"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp_win.zip | awk '{print $1 " yt-dlp_win.zip"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp_min.exe | awk '{print $1 " yt-dlp_min.exe"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp_x86.exe | awk '{print $1 " yt-dlp_x86.exe"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp_macos | awk '{print $1 " yt-dlp_macos"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp_macos.zip | awk '{print $1 " yt-dlp_macos.zip"}' >> SHA2-512SUMS
-          sha512sum artifact/yt-dlp_macos_legacy | awk '{print $1 " yt-dlp_macos_legacy"}' >> SHA2-512SUMS
-          sha512sum artifact/dist/yt-dlp_linux | awk '{print $1 " yt-dlp_linux"}' >> SHA2-512SUMS
-          sha512sum artifact/dist/yt-dlp_linux.zip | awk '{print $1 " yt-dlp_linux.zip"}' >> SHA2-512SUMS
-
-      - name: Publish Release
-        uses: yt-dlp/action-gh-release@v1
-        with:
-          tag_name: ${{ needs.prepare.outputs.ytdlp_version }}
-          name: yt-dlp ${{ needs.prepare.outputs.ytdlp_version }}
-          target_commitish: ${{ needs.prepare.outputs.head_sha }}
-          body: |
-            #### [A description of the various files]((https://github.com/yt-dlp/yt-dlp#release-files)) are in the README
-
-            ---
-            <details open><summary><h3>Changelog</summary>
-            <p>
-
-            ${{ env.changelog }}
-
-            </p>
-            </details>
-          files: |
-            SHA2-256SUMS
-            SHA2-512SUMS
-            artifact/yt-dlp
-            artifact/yt-dlp.tar.gz
-            artifact/yt-dlp.exe
-            artifact/yt-dlp_win.zip
-            artifact/yt-dlp_min.exe
-            artifact/yt-dlp_x86.exe
-            artifact/yt-dlp_macos
-            artifact/yt-dlp_macos.zip
-            artifact/yt-dlp_macos_legacy
-            artifact/dist/yt-dlp_linux
-            artifact/dist/yt-dlp_linux.zip
-            _update_spec
+          cd ./artifact/
+          sha256sum * > ../SHA2-256SUMS
+          sha512sum * > ../SHA2-512SUMS
+
+      - name: Make Update spec
+        run: |
+          cat >> _update_spec << EOF
+          # This file is used for regulating self-update
+          lock 2022.08.18.36 .+ Python 3.6
+          EOF
+
+      - name: Sign checksum files
+        env:
+          GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
+        if: env.GPG_SIGNING_KEY != ''
+        run: |
+          gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
+          for signfile in ./SHA*SUMS; do
+            gpg --batch --detach-sign "$signfile"
+          done
+
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          path: |
+            SHA*SUMS*
+            _update_spec
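The new `Sign checksum files` step produces detached GPG signatures (`.sig` files next to `SHA2-256SUMS`/`SHA2-512SUMS`), and the `meta_files` job uploads them together via the `SHA*SUMS*` glob. As a rough sketch of how a downloaded release could then be checked on the user's side (assuming the signing public key has already been imported into the local keyring):

```sh
# Verify the checksum list is authentic, then verify downloaded binaries against it
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
sha256sum --check --ignore-missing SHA2-256SUMS
```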
14  .github/workflows/core.yml  (vendored)
@@ -1,5 +1,8 @@
 name: Core Tests
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
   tests:
     name: Core Tests
@@ -9,13 +12,13 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        # CPython 3.9 is in quick-test
-        python-version: ['3.7', '3.10', 3.11-dev, pypy-3.7, pypy-3.8]
+        # CPython 3.11 is in quick-test
+        python-version: ['3.8', '3.9', '3.10', pypy-3.7, pypy-3.8]
         run-tests-ext: [sh]
         include:
         # atleast one of each CPython/PyPy tests must be in windows
         - os: windows-latest
-          python-version: '3.8'
+          python-version: '3.7'
           run-tests-ext: bat
         - os: windows-latest
           python-version: pypy-3.9
@@ -30,5 +33,6 @@ jobs:
         run: pip install pytest
       - name: Run tests
         continue-on-error: False
-        run: ./devscripts/run_tests.${{ matrix.run-tests-ext }} core
-        # Linter is in quick-test
+        run: |
+          python3 -m yt_dlp -v || true  # Print debug head
+          ./devscripts/run_tests.${{ matrix.run-tests-ext }} core
3  .github/workflows/download.yml  (vendored)
@@ -1,5 +1,8 @@
 name: Download Tests
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
   quick:
     name: Quick Download Tests
80  .github/workflows/publish.yml  (vendored, new file)
@@ -0,0 +1,80 @@
+name: Publish
+on:
+  workflow_call:
+    inputs:
+      nightly:
+        default: false
+        required: false
+        type: boolean
+      version:
+        required: true
+        type: string
+      target_commitish:
+        required: true
+        type: string
+    secrets:
+      ARCHIVE_REPO_TOKEN:
+        required: false
+
+permissions:
+  contents: write
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - uses: actions/download-artifact@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Generate release notes
+        run: |
+          cat >> ./RELEASE_NOTES << EOF
+          #### A description of the various files are in the [README](https://github.com/yt-dlp/yt-dlp#release-files)
+          ---
+          <details><summary><h3>Changelog</h3></summary>
+          $(python ./devscripts/make_changelog.py -vv)
+          </details>
+          EOF
+          echo "**This is an automated nightly pre-release build**" >> ./PRERELEASE_NOTES
+          cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
+          echo "Generated from: https://github.com/${{ github.repository }}/commit/${{ inputs.target_commitish }}" >> ./ARCHIVE_NOTES
+          cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
+
+      - name: Archive nightly release
+        env:
+          GH_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
+          GH_REPO: ${{ vars.ARCHIVE_REPO }}
+        if: |
+          inputs.nightly && env.GH_TOKEN != '' && env.GH_REPO != ''
+        run: |
+          gh release create \
+            --notes-file ARCHIVE_NOTES \
+            --title "Build ${{ inputs.version }}" \
+            ${{ inputs.version }} \
+            artifact/*
+
+      - name: Prune old nightly release
+        if: inputs.nightly
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          gh release delete --yes --cleanup-tag "nightly" || true
+          git tag --delete "nightly" || true
+          sleep 5  # Enough time to cover deletion race condition
+
+      - name: Publish release${{ inputs.nightly && ' (nightly)' || '' }}
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          gh release create \
+            --notes-file ${{ inputs.nightly && 'PRE' || '' }}RELEASE_NOTES \
+            --target ${{ inputs.target_commitish }} \
+            --title "yt-dlp ${{ inputs.nightly && 'nightly ' || '' }}${{ inputs.version }}" \
+            ${{ inputs.nightly && '--prerelease "nightly"' || inputs.version }} \
+            artifact/*
16  .github/workflows/quick-test.yml  (vendored)
@@ -1,5 +1,8 @@
 name: Quick Test
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
   tests:
     name: Core Test
@@ -7,24 +10,23 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - name: Set up Python
+      - name: Set up Python 3.11
         uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: '3.11'
       - name: Install test requirements
         run: pip install pytest pycryptodomex
       - name: Run tests
-        run: ./devscripts/run_tests.sh core
+        run: |
+          python3 -m yt_dlp -v || true
+          ./devscripts/run_tests.sh core
   flake8:
     name: Linter
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: 3.9
+      - uses: actions/setup-python@v4
       - name: Install flake8
         run: pip install flake8
       - name: Make lazy extractors
51  .github/workflows/release-nightly.yml  (vendored, new file)
@@ -0,0 +1,51 @@
+name: Release (nightly)
+on:
+  push:
+    branches:
+      - master
+    paths:
+      - "**.py"
+      - "!yt_dlp/version.py"
+concurrency:
+  group: release-nightly
+  cancel-in-progress: true
+permissions:
+  contents: read
+
+jobs:
+  prepare:
+    if: vars.BUILD_NIGHTLY != ''
+    runs-on: ubuntu-latest
+    outputs:
+      version: ${{ steps.get_version.outputs.version }}
+
+    steps:
+      - uses: actions/checkout@v3
+      - name: Get version
+        id: get_version
+        run: |
+          python devscripts/update-version.py "$(date -u +"%H%M%S")" | grep -Po "version=\d+(\.\d+){3}" >> "$GITHUB_OUTPUT"
+
+  build:
+    needs: prepare
+    uses: ./.github/workflows/build.yml
+    with:
+      version: ${{ needs.prepare.outputs.version }}
+      channel: nightly
+    permissions:
+      contents: read
+      packages: write  # For package cache
+    secrets:
+      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
+
+  publish:
+    needs: [prepare, build]
+    uses: ./.github/workflows/publish.yml
+    secrets:
+      ARCHIVE_REPO_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
+    permissions:
+      contents: write
+    with:
+      nightly: true
+      version: ${{ needs.prepare.outputs.version }}
+      target_commitish: ${{ github.sha }}
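The `Get version` step above works by appending a `key=value` line to the `$GITHUB_OUTPUT` file, which is how GitHub Actions exposes step outputs to later steps and jobs. A minimal sketch of the mechanism (the version string here is only illustrative):

```sh
# Inside a step with `id: get_version`
echo "version=2023.03.03" >> "$GITHUB_OUTPUT"
# Later, the value is available as ${{ steps.get_version.outputs.version }},
# or as ${{ needs.prepare.outputs.version }} from a downstream job, as used here
```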
127  .github/workflows/release.yml  (vendored, new file)
@@ -0,0 +1,127 @@
+name: Release
+on: workflow_dispatch
+permissions:
+  contents: read
+
+jobs:
+  prepare:
+    permissions:
+      contents: write
+    runs-on: ubuntu-latest
+    outputs:
+      version: ${{ steps.update_version.outputs.version }}
+      head_sha: ${{ steps.push_release.outputs.head_sha }}
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Update version
+        id: update_version
+        run: |
+          python devscripts/update-version.py ${{ vars.PUSH_VERSION_COMMIT == '' && '"$(date -u +"%H%M%S")"' || '' }} | \
+            grep -Po "version=\d+\.\d+\.\d+(\.\d+)?" >> "$GITHUB_OUTPUT"
+
+      - name: Update documentation
+        run: |
+          make doc
+          sed '/### /Q' Changelog.md >> ./CHANGELOG
+          echo '### ${{ steps.update_version.outputs.version }}' >> ./CHANGELOG
+          python ./devscripts/make_changelog.py -vv -c >> ./CHANGELOG
+          echo >> ./CHANGELOG
+          grep -Poz '(?s)### \d+\.\d+\.\d+.+' 'Changelog.md' | head -n -1 >> ./CHANGELOG
+          cat ./CHANGELOG > Changelog.md
+
+      - name: Push to release
+        id: push_release
+        run: |
+          git config --global user.name github-actions
+          git config --global user.email github-actions@example.com
+          git add -u
+          git commit -m "Release ${{ steps.update_version.outputs.version }}" \
+            -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
+          git push origin --force ${{ github.event.ref }}:release
+          echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
+
+      - name: Update master
+        if: vars.PUSH_VERSION_COMMIT != ''
+        run: git push origin ${{ github.event.ref }}
+
+  publish_pypi_homebrew:
+    needs: prepare
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Install Requirements
+        run: |
+          python -m pip install -U pip setuptools wheel twine
+          python -m pip install -U -r requirements.txt
+
+      - name: Prepare
+        run: |
+          python devscripts/update-version.py ${{ needs.prepare.outputs.version }}
+          python devscripts/make_lazy_extractors.py
+
+      - name: Build and publish on PyPI
+        env:
+          TWINE_USERNAME: __token__
+          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+        if: env.TWINE_PASSWORD != ''
+        run: |
+          rm -rf dist/*
+          python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
+          python setup.py sdist bdist_wheel
+          twine upload dist/*
+
+      - name: Checkout Homebrew repository
+        env:
+          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
+          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+        if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != ''
+        uses: actions/checkout@v3
+        with:
+          repository: yt-dlp/homebrew-taps
+          path: taps
+          ssh-key: ${{ secrets.BREW_TOKEN }}
+
+      - name: Update Homebrew Formulae
+        env:
+          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
+          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+        if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != ''
+        run: |
+          python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.prepare.outputs.version }}"
+          git -C taps/ config user.name github-actions
+          git -C taps/ config user.email github-actions@example.com
+          git -C taps/ commit -am 'yt-dlp: ${{ needs.prepare.outputs.version }}'
+          git -C taps/ push
+
+  build:
+    needs: prepare
+    uses: ./.github/workflows/build.yml
+    with:
+      version: ${{ needs.prepare.outputs.version }}
+    permissions:
+      contents: read
+      packages: write  # For package cache
+    secrets:
+      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
+
+  publish:
+    needs: [prepare, build]
+    uses: ./.github/workflows/publish.yml
+    permissions:
+      contents: write
+    with:
+      version: ${{ needs.prepare.outputs.version }}
+      target_commitish: ${{ needs.prepare.outputs.head_sha }}
15  .gitignore  (vendored)
@@ -30,16 +30,18 @@ cookies
 *.f4v
 *.flac
 *.flv
+*.gif
 *.jpeg
 *.jpg
 *.m4a
-*.mpga
 *.m4v
 *.mhtml
 *.mkv
 *.mov
 *.mp3
 *.mp4
+*.mpga
+*.oga
 *.ogg
 *.opus
 *.png
@@ -47,6 +49,7 @@ cookies
 *.srt
 *.swf
 *.swp
+*.tt
 *.ttml
 *.url
 *.vtt
@@ -69,6 +72,7 @@ dist/
 zip/
 tmp/
 venv/
+.venv/
 completions/
 
 # Misc
@@ -85,6 +89,7 @@ updates_key.pem
 .tox
 *.class
 *.isorted
+*.stackdump
 
 # Generated
 AUTHORS
@@ -116,9 +121,5 @@ yt-dlp.zip
 */extractor/lazy_extractors.py
 
 # Plugins
-ytdlp_plugins/extractor/*
-!ytdlp_plugins/extractor/__init__.py
-!ytdlp_plugins/extractor/sample.py
-ytdlp_plugins/postprocessor/*
-!ytdlp_plugins/postprocessor/__init__.py
-!ytdlp_plugins/postprocessor/sample.py
+ytdlp_plugins/
+yt-dlp-plugins
CONTRIBUTING.md
@@ -127,7 +127,7 @@ ### Are you willing to share account details if needed?
 
 ### Is the website primarily used for piracy?
 
-We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in deep fake. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
+We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
 
@@ -161,7 +161,7 @@ ## Adding new feature or making overarching changes
 
 ## Adding support for a new site
 
-If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](https://www.github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.
+If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](#is-the-website-primarily-used-for-piracy)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.
 
 After you have ensured this site is distributing its content legally, you can follow this quick list (assuming your service is called `yourextractor`):
 
@@ -351,8 +351,9 @@ #### Example
 ```python
 thumbnail_data = data.get('thumbnails') or []
 thumbnails = [{
-    'url': item['url']
-} for item in thumbnail_data]  # correct
+    'url': item['url'],
+    'height': item.get('h'),
+} for item in thumbnail_data if item.get('url')]  # correct
 ```
 
 and not like:
@@ -360,12 +361,27 @@ #### Example
 ```python
 thumbnail_data = data.get('thumbnails')
 thumbnails = [{
-    'url': item['url']
+    'url': item['url'],
+    'height': item.get('h'),
 } for item in thumbnail_data]  # incorrect
 ```
 
 In this case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `or []` avoids this error and results in setting an empty list in `thumbnails` instead.
 
+Alternately, this can be further simplified by using `traverse_obj`
+
+```python
+thumbnails = [{
+    'url': item['url'],
+    'height': item.get('h'),
+} for item in traverse_obj(data, ('thumbnails', lambda _, v: v['url']))]
+```
+
+or, even better,
+
+```python
+thumbnails = traverse_obj(data, ('thumbnails', ..., {'url': 'url', 'height': 'h'}))
+```
+
 ### Provide fallbacks
 
109  CONTRIBUTORS
@@ -3,6 +3,8 @@ shirt-dev (collaborator)
 coletdjnz/colethedj (collaborator)
 Ashish0804 (collaborator)
 nao20010128nao/Lesmiscore (collaborator)
+bashonly (collaborator)
+Grub4K (collaborator)
 h-h-h-h
 pauldubois98
 nixxo
@@ -295,7 +297,112 @@ Mehavoid
 winterbird-code
 yashkc2025
 aldoridhoni
-bashonly
 jacobtruman
 masta79
 palewire
+cgrigis
+DavidH-2022
+dfaker
+jackyyf
+ohaiibuzzle
+SamantazFox
+shreyasminocha
+tejasa97
+xenov
+satan1st
+0xGodspeed
+5736d79
+587021c
+basrieter
+Bobscorn
+CNugteren
+columndeeply
+DoubleCouponDay
+Fabi019
+GautamMKGarg
+itachi-19
+jeroenj
+josanabr
+LiviaMedeiros
+nikita-moor
+snapdgn
+SuperSonicHub1
+tannertechnology
+Timendum
+tobi1805
+TokyoBlackHole
+ajayyy
+Alienmaster
+bsun0000
+changren-wcr
+ClosedPort22
+CrankDatSouljaBoy
+cruel-efficiency
+endotronic
+Generator
+gibson042
+How-Bout-No
+invertico
+jahway603
+jwoglom
+lksj
+megapro17
+mlampe
+MrOctopus
+nosoop
+puc9
+sashashura
+schnusch
+SG5
+the-marenga
+tkgmomosheep
+vitkhab
+glensc
+synthpop123
+tntmod54321
+milkknife
+Bnyro
+CapacitorSet
+stelcodes
+skbeh
+muddi900
+digitall
+chengzhicn
+mexus
+JChris246
+redraskal
+Spicadox
+barsnick
+docbender
+KurtBestor
+Chrissi2812
+FrederikNS
+gschizas
+JC-Chung
+mzhou
+OndrejBakan
+ab4cbef
+aionescu
+amra
+ByteDream
+carusocr
+chexxor
+felixonmars
+FrankZ85
+FriedrichRehren
+gregsadetsky
+LeoniePhiline
+LowSuggestion912
+Matumo
+OIRNOIR
+OMEGARAZER
+oxamun
+pmitchell86
+qbnu
+qulaz
+rebane2001
+road-master
+rohieb
+sdht0
+seproDev
+Hill-98
604  Changelog.md
@@ -1,15 +1,601 @@
 # Changelog
 
 <!--
-# Instuctions for creating release
-
-* Run `make doc`
-* Update Changelog.md and CONTRIBUTORS
-* Change "Based on ytdl" version in Readme.md if needed
-* Commit as `Release <version>` and push to master
-* Dispatch the workflow https://github.com/yt-dlp/yt-dlp/actions/workflows/build.yml on master
+# To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master
 -->
 
+### 2023.03.03
+
+#### Important changes
+- **A new release type has been added!**
+    * [`nightly`](https://github.com/yt-dlp/yt-dlp/releases/tag/nightly) builds will be made after each push, containing the latest fixes (but also possibly bugs).
+    * When using `--update`/`-U`, a release binary will only update to its current channel (either `stable` or `nightly`).
+    * The `--update-to` option has been added allowing the user more control over program upgrades (or downgrades).
+    * `--update-to` can change the release channel (`stable`, `nightly`) and also upgrade or downgrade to specific tags.
+    * **Usage**: `--update-to CHANNEL`, `--update-to TAG`, `--update-to CHANNEL@TAG`
+- **YouTube throttling fixes!**
+
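Given the usage forms documented above, switching channels or pinning a version looks like this (the tag is only an example of the `CHANNEL@TAG` form):

```sh
# Move a stable binary onto the nightly channel
yt-dlp --update-to nightly
# Downgrade to a specific tag on the stable channel
yt-dlp --update-to stable@2023.02.17
```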
#### Core changes
|
||||||
|
- [Add option `--break-match-filters`](https://github.com/yt-dlp/yt-dlp/commit/fe2ce85aff0aa03735fc0152bb8cb9c3d4ef0753) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- [Fix `--break-on-existing` with `--lazy-playlist`](https://github.com/yt-dlp/yt-dlp/commit/d21056f4cf0a1623daa107f9181074f5725ac436) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- dependencies
|
||||||
|
- [Simplify `Cryptodome`](https://github.com/yt-dlp/yt-dlp/commit/65f6e807804d2af5e00f2aecd72bfc43af19324a) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- jsinterp
|
||||||
|
- [Handle `Date` at epoch 0](https://github.com/yt-dlp/yt-dlp/commit/9acf1ee25f7ad3920ede574a9de95b8c18626af4) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- plugins
|
||||||
|
- [Don't look in `.egg` directories](https://github.com/yt-dlp/yt-dlp/commit/b059188383eee4fa336ef728dda3ff4bb7335625) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- update
|
||||||
|
- [Add option `--update-to`, including to nightly](https://github.com/yt-dlp/yt-dlp/commit/77df20f14cc9ed41dfe3a1fe2d77fd27f5365a94) ([#6220](https://github.com/yt-dlp/yt-dlp/issues/6220)) by [bashonly](https://github.com/bashonly), [Grub4K](https://github.com/Grub4K), [pukkandan](https://github.com/pukkandan)
|
||||||
|
- utils
|
||||||
|
- `LenientJSONDecoder`: [Parse unclosed objects](https://github.com/yt-dlp/yt-dlp/commit/cc09083636ce21e58ff74f45eac2dbda507462b0) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- `Popen`: [Shim undocumented `text_mode` property](https://github.com/yt-dlp/yt-dlp/commit/da8e2912b165005f76779a115a071cd6132ceedf) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
|
||||||
|
#### Extractor changes
|
||||||
|
- [Fix DRM detection in m3u8](https://github.com/yt-dlp/yt-dlp/commit/43a3eaf96393b712d60cbcf5c6cb1e90ed7f42f5) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- generic
|
||||||
|
- [Detect manifest links via extension](https://github.com/yt-dlp/yt-dlp/commit/b38cae49e6f4849c8ee2a774bdc3c1c647ae5f0e) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Handle basic-auth when checking redirects](https://github.com/yt-dlp/yt-dlp/commit/8e9fe43cd393e69fa49b3d842aa3180c1d105b8f) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- GoogleDrive
|
||||||
|
- [Fix some audio](https://github.com/yt-dlp/yt-dlp/commit/4d248e29d20d983ededab0b03d4fe69dff9eb4ed) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- iprima
|
||||||
|
- [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/9fddc12ab022a31754e0eaa358fc4e1dfa974587) ([#6291](https://github.com/yt-dlp/yt-dlp/issues/6291)) by [std-move](https://github.com/std-move)
|
||||||
|
- mediastream
|
||||||
|
- [Improve WinSports support](https://github.com/yt-dlp/yt-dlp/commit/2d5a8c5db2bd4ff1c2e45e00cd890a10f8ffca9e) ([#6401](https://github.com/yt-dlp/yt-dlp/issues/6401)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- ntvru
|
||||||
|
- [Extract HLS and DASH formats](https://github.com/yt-dlp/yt-dlp/commit/77d6d136468d0c23c8e79bc937898747804f585a) ([#6403](https://github.com/yt-dlp/yt-dlp/issues/6403)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- tencent
|
||||||
|
- [Add more formats and info](https://github.com/yt-dlp/yt-dlp/commit/18d295c9e0f95adc179eef345b7af64d6372db78) ([#5950](https://github.com/yt-dlp/yt-dlp/issues/5950)) by [Hill-98](https://github.com/Hill-98)
|
||||||
|
- yle_areena
|
||||||
|
- [Extract non-Kaltura videos](https://github.com/yt-dlp/yt-dlp/commit/40d77d89027cd0e0ce31d22aec81db3e1d433900) ([#6402](https://github.com/yt-dlp/yt-dlp/issues/6402)) by [bashonly](https://github.com/bashonly)
|
||||||
|
- youtube
|
||||||
|
- [Construct dash formats with `range` query](https://github.com/yt-dlp/yt-dlp/commit/5038f6d713303e0967d002216e7a88652401c22a) by [pukkandan](https://github.com/pukkandan) (With fixes in [f34804b](https://github.com/yt-dlp/yt-dlp/commit/f34804b2f920f62a6e893a14a9e2a2144b14dd23) by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz))
|
||||||
|
- [Detect and break on looping comments](https://github.com/yt-dlp/yt-dlp/commit/7f51861b1820c37b157a239b1fe30628d907c034) ([#6301](https://github.com/yt-dlp/yt-dlp/issues/6301)) by [coletdjnz](https://github.com/coletdjnz)
|
||||||
|
- [Extract channel `view_count` when `/about` tab is passed](https://github.com/yt-dlp/yt-dlp/commit/31e183557fcd1b937582f9429f29207c1261f501) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
|
||||||
|
#### Misc. changes
|
||||||
|
- build
|
||||||
|
- [Add `cffi` as a dependency for `yt_dlp_linux`](https://github.com/yt-dlp/yt-dlp/commit/776d1c3f0c9b00399896dd2e40e78e9a43218109) by [bashonly](https://github.com/bashonly)
|
||||||
|
- [Automated builds and nightly releases](https://github.com/yt-dlp/yt-dlp/commit/29cb20bd563c02671b31dd840139e93dd37150a1) ([#6220](https://github.com/yt-dlp/yt-dlp/issues/6220)) by [bashonly](https://github.com/bashonly), [Grub4K](https://github.com/Grub4K) (With fixes in [bfc861a](https://github.com/yt-dlp/yt-dlp/commit/bfc861a91ee65c9b0ac169754f512e052c6827cf) by [pukkandan](https://github.com/pukkandan))
|
||||||
|
- [Sign SHA files and release public key](https://github.com/yt-dlp/yt-dlp/commit/12647e03d417feaa9ea6a458bea5ebd747494a53) by [Grub4K](https://github.com/Grub4K)
|
||||||
|
- cleanup
|
||||||
|
- [Fix `Changelog`](https://github.com/yt-dlp/yt-dlp/commit/17ca19ab60a6a13eb8a629c51442b5248b0d8394) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- jsinterp: [Give functions names to help debugging](https://github.com/yt-dlp/yt-dlp/commit/b2e0343ba0fc5d8702e90f6ba2b71358e2677e0b) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- Miscellaneous: [4815bbf](https://github.com/yt-dlp/yt-dlp/commit/4815bbfc41cf641e4a0650289dbff968cb3bde76), [5b28cef](https://github.com/yt-dlp/yt-dlp/commit/5b28cef72db3b531680d89c121631c73ae05354f) by [pukkandan](https://github.com/pukkandan)
|
||||||
|
- devscripts
|
||||||
|
- [Script to generate changelog](https://github.com/yt-dlp/yt-dlp/commit/d400e261cf029a3f20d364113b14de973be75404) ([#6220](https://github.com/yt-dlp/yt-dlp/issues/6220)) by [Grub4K](https://github.com/Grub4K) (With fixes in [9344964](https://github.com/yt-dlp/yt-dlp/commit/93449642815a6973a4b09b289982ca7e1f961b5f))
|
||||||
|
|
||||||
|
### 2023.02.17

* Merge youtube-dl: Upto [commit/2dd6c6e](https://github.com/ytdl-org/youtube-dl/commit/2dd6c6e)
* Fix `--concat-playlist`
* Imply `--no-progress` when `--print`
* Improve default subtitle language selection by [sdht0](https://github.com/sdht0)
* Make `title` completely non-fatal
* Sanitize formats before sorting by [pukkandan](https://github.com/pukkandan)
* Support module level `__bool__` and `property`
* [dependencies] Standardize `Cryptodome` imports
* [hls] Allow extractors to provide AES key by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
* [ExtractAudio] Handle outtmpl without ext by [carusocr](https://github.com/carusocr)
* [extractor/common] Fix `_search_nuxt_data` by [LowSuggestion912](https://github.com/LowSuggestion912)
* [extractor/generic] Avoid catastrophic backtracking in KVS regex by [bashonly](https://github.com/bashonly)
* [jsinterp] Support `if` statements
* [plugins] Fix zip search paths
* [utils] `traverse_obj`: Various improvements by [Grub4K](https://github.com/Grub4K)
* [utils] `traverse_obj`: Fix more bugs
* [utils] `traverse_obj`: Fix several behavioral problems by [Grub4K](https://github.com/Grub4K)
* [utils] Don't use Content-length with encoding by [felixonmars](https://github.com/felixonmars)
* [utils] Fix `time_seconds` to use the provided TZ by [Grub4K](https://github.com/Grub4K), [Lesmiscore](https://github.com/Lesmiscore)
* [utils] Fix race condition in `make_dir` by [aionescu](https://github.com/aionescu)
* [utils] Use local kernel32 for file locking on Windows by [Grub4K](https://github.com/Grub4K)
* [compat_utils] Improve `passthrough_module`
* [compat_utils] Simplify `EnhancedModule`
* [build] Update pyinstaller
* [pyinst] Fix for pyinstaller 5.8
* [devscripts] Provide `pyinstaller` hooks
* [devscripts/pyinstaller] Analyze sub-modules of `Cryptodome`
* [cleanup] Misc fixes and cleanup
* [extractor/anchorfm] Add episode extractor by [HobbyistDev](https://github.com/HobbyistDev), [bashonly](https://github.com/bashonly)
* [extractor/boxcast] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/ebay] Add extractor by [JChris246](https://github.com/JChris246)
* [extractor/hypergryph] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [bashonly](https://github.com/bashonly)
* [extractor/NZOnScreen] Add extractor by [gregsadetsky](https://github.com/gregsadetsky), [pukkandan](https://github.com/pukkandan)
* [extractor/rozhlas] Add extractor RozhlasVltavaIE by [amra](https://github.com/amra)
* [extractor/tempo] Add IVXPlayer extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/txxx] Add extractors by [chio0hai](https://github.com/chio0hai)
* [extractor/vocaroo] Add extractor by [SuperSonicHub1](https://github.com/SuperSonicHub1), [qbnu](https://github.com/qbnu)
* [extractor/wrestleuniverse] Add extractors by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
* [extractor/yappy] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [dirkf](https://github.com/dirkf)
* [extractor/youtube] **Fix `uploader_id` extraction** by [bashonly](https://github.com/bashonly)
* [extractor/youtube] Add hyperpipe instances by [Generator](https://github.com/Generator)
* [extractor/youtube] Handle `consent.youtube`
* [extractor/youtube] Support `/live/` URL
* [extractor/youtube] Update invidious and piped instances by [rohieb](https://github.com/rohieb)
* [extractor/91porn] Fix title and comment extraction by [pmitchell86](https://github.com/pmitchell86)
* [extractor/AbemaTV] Cache user token whenever appropriate by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/bfmtv] Support `rmc` prefix by [carusocr](https://github.com/carusocr)
* [extractor/biliintl] Add intro and ending chapters by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/clyp] Support `wav` by [qulaz](https://github.com/qulaz)
* [extractor/crunchyroll] Add intro chapter by [ByteDream](https://github.com/ByteDream)
* [extractor/crunchyroll] Better message for premium videos
* [extractor/crunchyroll] Fix incorrect premium-only error by [Grub4K](https://github.com/Grub4K)
* [extractor/DouyuTV] Use new API by [hatienl0i261299](https://github.com/hatienl0i261299)
* [extractor/embedly] Embedded links may be for other extractors
* [extractor/freesound] Workaround invalid URL in webpage by [rebane2001](https://github.com/rebane2001)
* [extractor/GoPlay] Use new API by [jeroenj](https://github.com/jeroenj)
* [extractor/Hidive] Fix subtitles and age-restriction by [chexxor](https://github.com/chexxor)
* [extractor/huya] Support HD streams by [felixonmars](https://github.com/felixonmars)
* [extractor/moviepilot] Fix extractor by [panatexxa](https://github.com/panatexxa)
* [extractor/nbc] Fix `NBC` and `NBCStations` extractors by [bashonly](https://github.com/bashonly)
* [extractor/nbc] Fix XML parsing by [bashonly](https://github.com/bashonly)
* [extractor/nebula] Remove broken cookie support by [hheimbuerger](https://github.com/hheimbuerger)
* [extractor/nfl] Add `NFLPlus` extractors by [bashonly](https://github.com/bashonly)
* [extractor/niconico] Add support for like history by [Matumo](https://github.com/Matumo), [pukkandan](https://github.com/pukkandan)
* [extractor/nitter] Update instance list by [OIRNOIR](https://github.com/OIRNOIR)
* [extractor/npo] Fix extractor and add HD support by [seproDev](https://github.com/seproDev)
* [extractor/odkmedia] Add `OnDemandChinaEpisodeIE` by [HobbyistDev](https://github.com/HobbyistDev), [pukkandan](https://github.com/pukkandan)
* [extractor/pornez] Handle relative URLs in iframe by [JChris246](https://github.com/JChris246)
* [extractor/radiko] Fix format sorting for Time Free by [road-master](https://github.com/road-master)
* [extractor/rcs] Fix extractors by [nixxo](https://github.com/nixxo), [pukkandan](https://github.com/pukkandan)
* [extractor/reddit] Support user posts by [OMEGARAZER](https://github.com/OMEGARAZER)
* [extractor/rumble] Fix format sorting by [pukkandan](https://github.com/pukkandan)
* [extractor/servus] Rewrite extractor by [Ashish0804](https://github.com/Ashish0804), [FrankZ85](https://github.com/FrankZ85), [StefanLobbenmeier](https://github.com/StefanLobbenmeier)
* [extractor/slideslive] Fix slides and chapters/duration by [bashonly](https://github.com/bashonly)
* [extractor/SportDeutschland] Fix extractor by [FriedrichRehren](https://github.com/FriedrichRehren)
* [extractor/Stripchat] Fix extractor by [JChris246](https://github.com/JChris246), [bashonly](https://github.com/bashonly)
* [extractor/tnaflix] Fix extractor by [bashonly](https://github.com/bashonly), [oxamun](https://github.com/oxamun)
* [extractor/tvp] Support `stream.tvp.pl` by [selfisekai](https://github.com/selfisekai)
* [extractor/twitter] Fix `--no-playlist` and add media `view_count` when using GraphQL by [Grub4K](https://github.com/Grub4K)
* [extractor/twitter] Fix graphql extraction on some tweets by [selfisekai](https://github.com/selfisekai)
* [extractor/vimeo] Fix `playerConfig` extraction by [LeoniePhiline](https://github.com/LeoniePhiline), [bashonly](https://github.com/bashonly)
* [extractor/viu] Add `ViuOTTIndonesiaIE` extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/vk] Fix playlists for new API by [the-marenga](https://github.com/the-marenga)
* [extractor/vlive] Replace with `VLiveWebArchiveIE` by [seproDev](https://github.com/seproDev)
* [extractor/ximalaya] Update album `_VALID_URL` by [carusocr](https://github.com/carusocr)
* [extractor/zdf] Use android API endpoint for UHD downloads by [seproDev](https://github.com/seproDev)
* [extractor/drtv] Fix bug in [ab4cbef](https://github.com/yt-dlp/yt-dlp/commit/ab4cbef) by [bashonly](https://github.com/bashonly)

### 2023.01.06

* Fix config locations by [Grub4K](https://github.com/Grub4K), [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [downloader/aria2c] Disable native progress
* [utils] `mimetype2ext`: `weba` is not standard
* [utils] `windows_enable_vt_mode`: Better error handling
* [build] Add minimal `pyproject.toml`
* [update] Fix updater file removal on Windows by [Grub4K](https://github.com/Grub4K)
* [cleanup] Misc fixes and cleanup
* [extractor/aitube] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/drtv] Add series extractors by [FrederikNS](https://github.com/FrederikNS)
* [extractor/volejtv] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/xanimu] Add extractor by [JChris246](https://github.com/JChris246)
* [extractor/youtube] Retry manifest refresh for live-from-start by [mzhou](https://github.com/mzhou)
* [extractor/biliintl] Add `/media` to `VALID_URL` by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/biliIntl] Add fallback to `video_data` by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/crunchyroll:show] Add `language` to entries by [Chrissi2812](https://github.com/Chrissi2812)
* [extractor/joj] Fix extractor by [OndrejBakan](https://github.com/OndrejBakan), [pukkandan](https://github.com/pukkandan)
* [extractor/nbc] Update graphql query by [jacobtruman](https://github.com/jacobtruman)
* [extractor/reddit] Add subreddit as `channel_id` by [gschizas](https://github.com/gschizas)
* [extractor/tiktok] Add `TikTokLive` extractor by [JC-Chung](https://github.com/JC-Chung)

### 2023.01.02

* **Improve plugin architecture** by [Grub4K](https://github.com/Grub4K), [coletdjnz](https://github.com/coletdjnz), [flashdagger](https://github.com/flashdagger), [pukkandan](https://github.com/pukkandan)
    * Plugins can be loaded in any distribution of yt-dlp (binary, pip, source, etc.) and can be distributed and installed as packages. See [the readme](https://github.com/yt-dlp/yt-dlp/tree/05997b6e98e638d97d409c65bb5eb86da68f3b64#plugins) for more information
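      A minimal sketch of how such a plugin package might be installed (the package name is hypothetical; the `yt_dlp_plugins` namespace and search path follow the linked readme):

      ```sh
      # Install a plugin package like any other Python package
      # (hypothetical package name, for illustration only):
      python3 -m pip install -U yt-dlp-sample-plugins

      # ...or drop plugin files into a plugin search path, e.g.:
      #   ~/.config/yt-dlp/plugins/<package>/yt_dlp_plugins/extractor/sample.py
      ```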
* Add `--compat-options 2021,2022`
    * This allows devs to change defaults and make other potentially breaking changes more easily. If you need everything to work exactly as it does now, use `--compat-options 2022` in your config to guard against future compat changes
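      For example, a one-line guard in the configuration file (a sketch; adjust the path to wherever your configuration lives):

      ```sh
      # Pin today's behavior so future default changes don't affect you
      echo "--compat-options 2022" >> ~/.config/yt-dlp/config
      ```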
* [downloader/aria2c] Native progress for aria2c via RPC by [Lesmiscore](https://github.com/Lesmiscore), [pukkandan](https://github.com/pukkandan)
* Merge youtube-dl: Upto [commit/195f22f](https://github.com/ytdl-org/youtube-dl/commit/195f22f6) by [Grub4K](https://github.com/Grub4K), [pukkandan](https://github.com/pukkandan)
* Add pre-processor stage `video`
* Let `--parse/replace-in-metadata` run at any post-processing stage
* Add `--enable-file-urls` by [coletdjnz](https://github.com/coletdjnz)
* Add new field `aspect_ratio`
* Add `ac4` to known codecs
* Add `weba` to known extensions
* [FFmpegVideoConvertor] Add `gif` to `--recode-video`
* Add message when there are no subtitles/thumbnails
* Deprioritize HEVC-over-FLV formats by [Lesmiscore](https://github.com/Lesmiscore)
* Make early reject of `--match-filter` stricter
* Fix `--cookies-from-browser` CLI parsing
* Fix `original_url` in playlists
* Fix bug in writing playlist info-json
* Fix bugs in `PlaylistEntries`
* [downloader/ffmpeg] Fix headers for video+audio formats by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
* [extractor] Add a way to distinguish IEs that return only videos
* [extractor] Implement universal format sorting and deprecate `_sort_formats`
* [extractor] Let `_extract_format` functions obey `--ignore-no-formats`
* [extractor/generic] Add `fragment_query` extractor arg for DASH and HLS by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
* [extractor/generic] Decode unicode-escaped embed URLs by [bashonly](https://github.com/bashonly)
* [extractor/generic] Don't report redirect to https
* [extractor/generic] Fix JSON LD manifest extraction by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
* [extractor/generic] Use `Accept-Encoding: identity` for initial request by [coletdjnz](https://github.com/coletdjnz)
* [FormatSort] Add `mov` to `vext`
* [jsinterp] Escape regex that looks like nested set
* [webvtt] Handle premature EOF by [flashdagger](https://github.com/flashdagger)
* [utils] `classproperty`: Add cache support
* [utils] `get_exe_version`: Detect broken executables by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
* [utils] `js_to_json`: Fix bug in [f55523c](https://github.com/yt-dlp/yt-dlp/commit/f55523c) by [ChillingPepper](https://github.com/ChillingPepper), [pukkandan](https://github.com/pukkandan)
* [utils] Make `ExtractorError` mutable
* [utils] Move `FileDownloader.parse_bytes` into utils
* [utils] Move format sorting code into `utils`
* [utils] `windows_enable_vt_mode`: Proper implementation by [Grub4K](https://github.com/Grub4K)
* [update] Workaround [#5632](https://github.com/yt-dlp/yt-dlp/issues/5632)
* [docs] Improvements
* [cleanup] Misc fixes and cleanup
* [cleanup] Use `random.choices` by [freezboltz](https://github.com/freezboltz)
* [extractor/airtv] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/amazonminitv] Add extractors by [GautamMKGarg](https://github.com/GautamMKGarg), [nyuszika7h](https://github.com/nyuszika7h)
* [extractor/beatbump] Add extractors by [Bobscorn](https://github.com/Bobscorn), [pukkandan](https://github.com/pukkandan)
* [extractor/europarl] Add EuroParlWebstream extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/kanal2] Add extractor by [bashonly](https://github.com/bashonly), [glensc](https://github.com/glensc), [pukkandan](https://github.com/pukkandan)
* [extractor/kankanews] Add extractor by [synthpop123](https://github.com/synthpop123)
* [extractor/kick] Add extractor by [bashonly](https://github.com/bashonly)
* [extractor/mediastream] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [elyse0](https://github.com/elyse0)
* [extractor/noice] Add NoicePodcast extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/oneplace] Add OnePlacePodcast extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/rumble] Add RumbleIE extractor by [flashdagger](https://github.com/flashdagger)
* [extractor/screencastify] Add extractor by [bashonly](https://github.com/bashonly)
* [extractor/trtcocuk] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/Veoh] Add user extractor by [tntmod54321](https://github.com/tntmod54321)
* [extractor/videoken] Add extractors by [bashonly](https://github.com/bashonly)
* [extractor/webcamerapl] Add extractor by [milkknife](https://github.com/milkknife)
* [extractor/amazon] Add `AmazonReviews` extractor by [bashonly](https://github.com/bashonly)
* [extractor/netverse] Add `NetverseSearch` extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/vimeo] Add `VimeoProIE` by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
* [extractor/xiami] Remove extractors by [synthpop123](https://github.com/synthpop123)
* [extractor/youtube] Add `piped.video` by [Bnyro](https://github.com/Bnyro)
* [extractor/youtube] Consider language in format de-duplication
* [extractor/youtube] Extract DRC formats
* [extractor/youtube] Fix `ytuser:`
* [extractor/youtube] Fix bug in handling of music URLs
* [extractor/youtube] Subtitles cannot be translated to `und`
* [extractor/youtube:tab] Extract metadata from channel items by [coletdjnz](https://github.com/coletdjnz)
* [extractor/ARD] Add vtt subtitles by [CapacitorSet](https://github.com/CapacitorSet)
* [extractor/ArteTV] Extract chapters by [bashonly](https://github.com/bashonly), [iw0nderhow](https://github.com/iw0nderhow)
* [extractor/bandcamp] Add `album_artist` by [stelcodes](https://github.com/stelcodes)
* [extractor/bilibili] Fix `--no-playlist` for anthology
* [extractor/bilibili] Improve `_VALID_URL` by [skbeh](https://github.com/skbeh)
* [extractor/biliintl:series] Make partial download of series faster
* [extractor/BiliLive] Fix extractor
* [extractor/brightcove] Add `BrightcoveNewBaseIE` and fix embed extraction
* [extractor/cda] Support premium and misc improvements by [selfisekai](https://github.com/selfisekai)
* [extractor/ciscowebex] Support password-protected videos by [damianoamatruda](https://github.com/damianoamatruda)
* [extractor/curiositystream] Fix auth by [mnn](https://github.com/mnn)
* [extractor/embedly] Handle vimeo embeds
* [extractor/fifa] Fix Preplay extraction by [dirkf](https://github.com/dirkf)
* [extractor/foxsports] Fix extractor by [bashonly](https://github.com/bashonly)
* [extractor/gronkh] Fix `_VALID_URL` by [muddi900](https://github.com/muddi900)
* [extractor/hotstar] Improve format metadata
* [extractor/iqiyi] Fix `Iq` JS regex by [bashonly](https://github.com/bashonly)
* [extractor/la7] Improve extractor by [nixxo](https://github.com/nixxo)
* [extractor/mediaset] Better embed detection and error messages by [nixxo](https://github.com/nixxo)
* [extractor/mixch] Support `--wait-for-video`
* [extractor/naver] Improve `_VALID_URL` for `NaverNowIE` by [bashonly](https://github.com/bashonly)
* [extractor/naver] Treat fan subtitles as separate language
* [extractor/netverse] Extract comments by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/nosnl] Add support for /video by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/odnoklassniki] Extract subtitles by [bashonly](https://github.com/bashonly)
* [extractor/pinterest] Fix extractor by [bashonly](https://github.com/bashonly)
* [extractor/plutotv] Fix videos with non-zero start by [digitall](https://github.com/digitall)
* [extractor/polskieradio] Adapt to next.js redesigns by [selfisekai](https://github.com/selfisekai)
* [extractor/reddit] Add vcodec to fallback format by [chengzhicn](https://github.com/chengzhicn)
* [extractor/reddit] Extract crossposted media by [bashonly](https://github.com/bashonly)
* [extractor/reddit] Extract video embeds in text posts by [bashonly](https://github.com/bashonly)
* [extractor/rutube] Support private videos by [mexus](https://github.com/mexus)
* [extractor/sibnet] Separate from VKIE
* [extractor/slideslive] Fix extractor by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
* [extractor/slideslive] Support embeds and slides by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
* [extractor/soundcloud] Support user permalink by [nosoop](https://github.com/nosoop)
* [extractor/spankbang] Fix extractor by [JChris246](https://github.com/JChris246)
* [extractor/stv] Detect DRM
* [extractor/swearnet] Fix description bug
* [extractor/tencent] Fix geo-restricted video by [elyse0](https://github.com/elyse0)
* [extractor/tiktok] Fix subs, `DouyinIE`, improve `_VALID_URL` by [bashonly](https://github.com/bashonly)
* [extractor/tiktok] Update `_VALID_URL`, add `api_hostname` arg by [bashonly](https://github.com/bashonly)
* [extractor/tiktok] Update API hostname by [redraskal](https://github.com/redraskal)
* [extractor/twitcasting] Fix videos with password by [Spicadox](https://github.com/Spicadox), [bashonly](https://github.com/bashonly)
* [extractor/twitter] Heed `--no-playlist` for multi-video tweets by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
* [extractor/twitter] Refresh guest token when expired by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
* [extractor/twitter:spaces] Add `Referer` to m3u8 by [nixxo](https://github.com/nixxo)
* [extractor/udemy] Fix lectures that have no URL and detect DRM
* [extractor/unsupported] Add more URLs
* [extractor/urplay] Support for audio-only formats by [barsnick](https://github.com/barsnick)
* [extractor/wistia] Improve extension detection by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
* [extractor/yle_areena] Support restricted videos by [docbender](https://github.com/docbender)
* [extractor/youku] Fix extractor by [KurtBestor](https://github.com/KurtBestor)
* [extractor/youporn] Fix metadata by [marieell](https://github.com/marieell)
* [extractor/redgifs] Fix bug in [8c188d5](https://github.com/yt-dlp/yt-dlp/commit/8c188d5d09177ed213a05c900d3523867c5897fd)

### 2022.11.11

* Merge youtube-dl: Upto [commit/de39d12](https://github.com/ytdl-org/youtube-dl/commit/de39d128)
* Backport SSL configuration from Python 3.10 by [coletdjnz](https://github.com/coletdjnz)
* Do more processing in `--flat-playlist`
* Fix `--list` options not implying `-s` in some cases by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
* Fix end time of clips by [cruel-efficiency](https://github.com/cruel-efficiency)
* Fix for `formats=None`
* Write API params in debug head
* [outtmpl] Ensure ASCII in json and add option for Unicode
* [SponsorBlock] Add `type` field, obey `--retry-sleep extractor`, relax duration check for large segments
* [SponsorBlock] **Support `chapter` category** by [ajayyy](https://github.com/ajayyy), [pukkandan](https://github.com/pukkandan)
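    A possible invocation using the new category (a sketch; the URL is a placeholder):

    ```sh
    # Mark sponsor segments and community-submitted chapters as chapters
    # in the output file instead of cutting them out
    yt-dlp --sponsorblock-mark sponsor,chapter "https://www.youtube.com/watch?v=VIDEO_ID"
    ```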
* [ThumbnailsConvertor] Fix filename escaping by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
* [ModifyChapters] Handle the entire video being marked for removal
* [embedthumbnail] Fix thumbnail name in mp3 by [How-Bout-No](https://github.com/How-Bout-No)
* [downloader/fragment] HLS download can continue without first fragment
* [cookies] Improve `LenientSimpleCookie` by [Grub4K](https://github.com/Grub4K)
* [jsinterp] Improve separating regex
* [extractor/common] Fix `fatal=False` for `_search_nuxt_data`
* [extractor/common] Improve `_generic_title`
* [extractor/common] Fix `json_ld` type checks by [Grub4K](https://github.com/Grub4K)
* [extractor/generic] Separate embed extraction into own function
* [extractor/generic:quoted-html] Add extractor by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [extractor/unsupported] Raise error on known DRM-only sites by [coletdjnz](https://github.com/coletdjnz)
* [utils] `js_to_json`: Improve escape handling by [Grub4K](https://github.com/Grub4K)
* [utils] `strftime_or_none`: Workaround Python bug on Windows
* [utils] `traverse_obj`: Always return list when branching, allow `re.Match` objects by [Grub4K](https://github.com/Grub4K)
* [build, test] Harden workflows' security by [sashashura](https://github.com/sashashura)
* [build] `py2exe`: Migrate to freeze API by [SG5](https://github.com/SG5), [pukkandan](https://github.com/pukkandan)
* [build] Create `armv7l` and `aarch64` releases by [MrOctopus](https://github.com/MrOctopus), [pukkandan](https://github.com/pukkandan)
* [build] Make linux binary truly standalone using `conda` by [mlampe](https://github.com/mlampe)
* [build] Replace `set-output` with `GITHUB_OUTPUT` by [Lesmiscore](https://github.com/Lesmiscore)
* [update] Use error code `100` for update errors
* [compat] Fix `shutil.move` in restricted ACL mode on BSD by [ClosedPort22](https://github.com/ClosedPort22), [pukkandan](https://github.com/pukkandan)
* [docs, devscripts] Document `pyinst`'s argument passthrough by [jahway603](https://github.com/jahway603)
* [test] Allow `extract_flat` in download tests by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [cleanup] Misc fixes and cleanup by [pukkandan](https://github.com/pukkandan), [Alienmaster](https://github.com/Alienmaster)
* [extractor/aeon] Add extractor by [DoubleCouponDay](https://github.com/DoubleCouponDay)
* [extractor/agora] Add extractors by [selfisekai](https://github.com/selfisekai)
* [extractor/camsoda] Add extractor by [zulaport](https://github.com/zulaport)
* [extractor/cinetecamilano] Add extractor by [timendum](https://github.com/timendum)
* [extractor/deuxm] Add extractors by [CrankDatSouljaBoy](https://github.com/CrankDatSouljaBoy)
* [extractor/genius] Add extractors by [bashonly](https://github.com/bashonly)
* [extractor/japandiet] Add extractors by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/listennotes] Add extractor by [lksj](https://github.com/lksj), [pukkandan](https://github.com/pukkandan)
* [extractor/nos.nl] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/oftv] Add extractors by [DoubleCouponDay](https://github.com/DoubleCouponDay)
* [extractor/podbayfm] Add extractor by [schnusch](https://github.com/schnusch)
* [extractor/qingting] Add extractor by [bashonly](https://github.com/bashonly), [changren-wcr](https://github.com/changren-wcr)
* [extractor/screen9] Add extractor by [tpikonen](https://github.com/tpikonen)
* [extractor/swearnet] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/YleAreena] Add extractor by [pukkandan](https://github.com/pukkandan), [vitkhab](https://github.com/vitkhab)
* [extractor/zeenews] Add extractor by [m4tu4g](https://github.com/m4tu4g), [pukkandan](https://github.com/pukkandan)
* [extractor/youtube:tab] **Update tab handling for redesign** by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
    * Channel URLs download all uploads of the channel as multiple playlists, separated by tab
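      In practice, a bare channel URL now yields one playlist per tab, while a tab URL restricts extraction to that tab; for example (the channel handle is a placeholder):

      ```sh
      # All tabs (videos, shorts, streams, ...), as separate playlists
      yt-dlp "https://www.youtube.com/@CHANNEL"

      # Only the "Videos" tab
      yt-dlp "https://www.youtube.com/@CHANNEL/videos"
      ```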
* [extractor/youtube] Differentiate between no comments and disabled comments by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Extract `concurrent_view_count` for livestreams by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Fix `duration` for premieres by [nosoop](https://github.com/nosoop)
* [extractor/youtube] Fix `live_status` by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [extractor/youtube] Ignore incomplete data error for comment replies by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Improve chapter parsing from description
* [extractor/youtube] Mark videos as fully watched by [bsun0000](https://github.com/bsun0000)
* [extractor/youtube] Update piped instances by [Generator](https://github.com/Generator)
* [extractor/youtube] Update playlist metadata extraction for new layout by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube:tab] Fix video metadata from tabs by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube:tab] Let `approximate_date` return timestamp
* [extractor/americastestkitchen] Fix extractor by [bashonly](https://github.com/bashonly)
* [extractor/bbc] Support onion domains by [DoubleCouponDay](https://github.com/DoubleCouponDay)
* [extractor/bilibili] Add chapters and misc cleanup by [lockmatrix](https://github.com/lockmatrix), [pukkandan](https://github.com/pukkandan)
* [extractor/bilibili] Fix BilibiliIE and Bangumi extractors by [lockmatrix](https://github.com/lockmatrix), [pukkandan](https://github.com/pukkandan)
* [extractor/bitchute] Better error for geo-restricted videos by [flashdagger](https://github.com/flashdagger)
* [extractor/bitchute] Improve `BitChuteChannelIE` by [flashdagger](https://github.com/flashdagger), [pukkandan](https://github.com/pukkandan)
* [extractor/bitchute] Simplify extractor by [flashdagger](https://github.com/flashdagger), [pukkandan](https://github.com/pukkandan)
* [extractor/cda] Support login through API by [selfisekai](https://github.com/selfisekai)
* [extractor/crunchyroll] Beta is now the only layout by [tejing1](https://github.com/tejing1)
* [extractor/detik] Avoid unnecessary extraction
* [extractor/doodstream] Remove extractor
* [extractor/dplay] Add MotorTrendOnDemand extractor by [bashonly](https://github.com/bashonly)
* [extractor/epoch] Support videos without data-trailer by [gibson042](https://github.com/gibson042), [pukkandan](https://github.com/pukkandan)
* [extractor/fox] Extract thumbnail by [vitkhab](https://github.com/vitkhab)
* [extractor/foxnews] Add `FoxNewsVideo` extractor
* [extractor/hotstar] Add season support by [m4tu4g](https://github.com/m4tu4g)
* [extractor/hotstar] Refactor v1 API calls
* [extractor/iprima] Make json+ld non-fatal by [bashonly](https://github.com/bashonly)
* [extractor/iq] Increase phantomjs timeout
* [extractor/kaltura] Support playlists by [jwoglom](https://github.com/jwoglom), [pukkandan](https://github.com/pukkandan)
* [extractor/lbry] Authenticate with cookies by [flashdagger](https://github.com/flashdagger)
* [extractor/livestreamfails] Support posts by [invertico](https://github.com/invertico)
* [extractor/mlb] Add `MLBArticle` extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/mxplayer] Improve extractor by [m4tu4g](https://github.com/m4tu4g)
* [extractor/niconico] Always use HTTPS for requests
* [extractor/nzherald] Support new video embed by [coletdjnz](https://github.com/coletdjnz)
* [extractor/odnoklassniki] Support boosty.to embeds by [Lesmiscore](https://github.com/Lesmiscore), [megapro17](https://github.com/megapro17), [pukkandan](https://github.com/pukkandan)
* [extractor/paramountplus] Update API token by [bashonly](https://github.com/bashonly)
* [extractor/reddit] Add fallback format by [bashonly](https://github.com/bashonly)
* [extractor/redgifs] Fix extractors by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
* [extractor/redgifs] Refresh auth token for 401 by [endotronic](https://github.com/endotronic), [pukkandan](https://github.com/pukkandan)
* [extractor/rumble] Add HLS formats and extract more metadata by [flashdagger](https://github.com/flashdagger)
* [extractor/sbs] Improve `_VALID_URL` by [bashonly](https://github.com/bashonly)
* [extractor/skyit] Fix extractors by [nixxo](https://github.com/nixxo)
* [extractor/stripchat] Fix hostname for HLS stream by [zulaport](https://github.com/zulaport)
* [extractor/stripchat] Improve error message by [freezboltz](https://github.com/freezboltz)
* [extractor/telegram] Add playlist support and more metadata by [bashonly](https://github.com/bashonly), [bsun0000](https://github.com/bsun0000)
* [extractor/Tnaflix] Fix for HTTP 500 by [SG5](https://github.com/SG5), [pukkandan](https://github.com/pukkandan)
* [extractor/tubitv] Better DRM detection by [bashonly](https://github.com/bashonly)
* [extractor/tvp] Update extractors by [selfisekai](https://github.com/selfisekai)
* [extractor/twitcasting] Fix `data-movie-playlist` extraction by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/twitter] Add onion site to `_VALID_URL` by [DoubleCouponDay](https://github.com/DoubleCouponDay)
* [extractor/twitter] Add Spaces extractor and GraphQL API by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly), [nixxo](https://github.com/nixxo), [pukkandan](https://github.com/pukkandan)
* [extractor/twitter] Support multi-video posts by [Grub4K](https://github.com/Grub4K)
* [extractor/uktvplay] Fix `_VALID_URL`
* [extractor/viu] Support subtitles of on-screen text by [tkgmomosheep](https://github.com/tkgmomosheep)
* [extractor/VK] Fix playlist URLs by [the-marenga](https://github.com/the-marenga)
* [extractor/vlive] Extract `release_timestamp`
* [extractor/voot] Improve `_VALID_URL` by [freezboltz](https://github.com/freezboltz)
* [extractor/wordpress:mb.miniAudioPlayer] Add embed extractor by [coletdjnz](https://github.com/coletdjnz)
* [extractor/YoutubeWebArchive] Improve metadata extraction by [coletdjnz](https://github.com/coletdjnz)
* [extractor/zee5] Improve `_VALID_URL` by [m4tu4g](https://github.com/m4tu4g)
* [extractor/zenyandex] Fix extractors by [lksj](https://github.com/lksj), [puc9](https://github.com/puc9), [pukkandan](https://github.com/pukkandan)

### 2022.10.04

* Allow a `set` to be passed as `download_archive` by [pukkandan](https://github.com/pukkandan), [bashonly](https://github.com/bashonly)
* Allow open ranges for time ranges by [Lesmiscore](https://github.com/Lesmiscore)
* Allow plugin extractors to replace the built-in ones
* Don't download entire video when no matching `--download-sections`
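    A sketch combining this with the open-range entry above (the `*start-inf` range form and the URL are assumptions for illustration):

    ```sh
    # Download only the section from 10:15 to the end; if a video has
    # no matching section, nothing is downloaded for it
    yt-dlp --download-sections "*10:15-inf" "https://example.com/VIDEO"
    ```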
* Fix `--config-location -`
* Improve [5736d79](https://github.com/yt-dlp/yt-dlp/pull/5044/commits/5736d79172c47ff84740d5720467370a560febad)
* Fix for when playlists don't have `webpage_url`
* Support environment variables in `--ffmpeg-location`
* Workaround `libc_ver` not being available on Windows Store version of Python
* [outtmpl] Curly braces to filter keys by [pukkandan](https://github.com/pukkandan)
* [outtmpl] Make `%s` work in strftime for all systems
* [jsinterp] Workaround operator associativity issue
* [cookies] Let `_get_mac_keyring_password` fail gracefully
* [cookies] Parse cookies leniently by [Grub4K](https://github.com/Grub4K)
* [phantomjs] Fix bug in [587021c](https://github.com/yt-dlp/yt-dlp/commit/587021cd9f717181b44e881941aca3f8d753758b) by [elyse0](https://github.com/elyse0)
* [downloader/aria2c] Fix filename containing leading whitespace by [std-move](https://github.com/std-move)
* [downloader/ism] Support ec-3 codec by [nixxo](https://github.com/nixxo)
* [extractor] Fix `fatal=False` in `RetryManager`
* [extractor] Improve json-ld extraction
* [extractor] Make `_search_json` able to parse lists
* [extractor] Escape `%` in `representation_id` of m3u8
* [extractor/generic] Pass through referer from json-ld
* [utils] `base_url`: URL paths can contain `&` by [elyse0](https://github.com/elyse0)
* [utils] `js_to_json`: Improve
* [utils] `Popen.run`: Fix default return in binary mode
* [utils] `traverse_obj`: Rewrite, document and add tests by [Grub4K](https://github.com/Grub4K)
* [devscripts] `make_lazy_extractors`: Fix for Docker by [josanabr](https://github.com/josanabr)
* [docs] Misc Improvements
* [cleanup] Misc fixes and cleanup by [pukkandan](https://github.com/pukkandan), [gamer191](https://github.com/gamer191)
* [extractor/24tv.ua] Add extractors by [coletdjnz](https://github.com/coletdjnz)
* [extractor/BerufeTV] Add extractor by [Fabi019](https://github.com/Fabi019)
* [extractor/booyah] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [elyse0](https://github.com/elyse0)
* [extractor/bundesliga] Add extractor by [Fabi019](https://github.com/Fabi019)
* [extractor/GoPlay] Add extractor by [CNugteren](https://github.com/CNugteren), [basrieter](https://github.com/basrieter), [jeroenj](https://github.com/jeroenj)
* [extractor/iltalehti] Add extractor by [tpikonen](https://github.com/tpikonen)
* [extractor/IsraelNationalNews] Add extractor by [Bobscorn](https://github.com/Bobscorn)
* [extractor/mediaworksnzvod] Add extractor by [coletdjnz](https://github.com/coletdjnz)
* [extractor/MicrosoftEmbed] Add extractor by [DoubleCouponDay](https://github.com/DoubleCouponDay)
* [extractor/nbc] Add NBCStations extractor by [bashonly](https://github.com/bashonly)
* [extractor/onenewsnz] Add extractor by [coletdjnz](https://github.com/coletdjnz)
* [extractor/prankcast] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [columndeeply](https://github.com/columndeeply)
* [extractor/Smotrim] Add extractor by [Lesmiscore](https://github.com/Lesmiscore), [nikita-moor](https://github.com/nikita-moor)
* [extractor/tencent] Add Iflix extractor by [elyse0](https://github.com/elyse0)
* [extractor/unscripted] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/adobepass] Add MSO AlticeOne (Optimum TV) by [CplPwnies](https://github.com/CplPwnies)
* [extractor/youtube] **Download `post_live` videos from start** by [Lesmiscore](https://github.com/Lesmiscore), [pukkandan](https://github.com/pukkandan)
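    This extends `--live-from-start` to streams that have just ended but are still served as "post-live"; e.g. (the URL is a placeholder):

    ```sh
    # Download a just-finished livestream from its very beginning
    yt-dlp --live-from-start "https://www.youtube.com/watch?v=VIDEO_ID"
    ```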
* [extractor/youtube] Add support for Shorts audio pivot feed by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [extractor/youtube] Detect `lazy-load-for-videos` embeds
* [extractor/youtube] Do not warn on duplicate chapters
* [extractor/youtube] Fix video like count extraction by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Support changing extraction language by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube:tab] Improve continuation items extraction
* [extractor/youtube:tab] Support `reporthistory` page
* [extractor/amazonstore] Fix JSON extraction by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [extractor/amazonstore] Retry to avoid captcha page by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/animeondemand] Remove extractor by [TokyoBlackHole](https://github.com/TokyoBlackHole)
* [extractor/anvato] Fix extractor and refactor by [bashonly](https://github.com/bashonly)
* [extractor/artetv] Remove duplicate stream urls by [Grub4K](https://github.com/Grub4K)
* [extractor/audioboom] Support direct URLs and refactor by [pukkandan](https://github.com/pukkandan), [tpikonen](https://github.com/tpikonen)
* [extractor/bandcamp] Extract `uploader_url`
* [extractor/bilibili] Add space.bilibili extractors by [lockmatrix](https://github.com/lockmatrix)
* [extractor/BilibiliSpace] Fix extractor and better error message by [lockmatrix](https://github.com/lockmatrix)
* [extractor/BiliIntl] Support uppercase lang in `_VALID_URL` by [coletdjnz](https://github.com/coletdjnz)
* [extractor/BiliIntlSeries] Fix `_VALID_URL`
* [extractor/bongacams] Update `_VALID_URL` by [0xGodspeed](https://github.com/0xGodspeed)
* [extractor/crunchyroll:beta] Improve handling of hardsubs by [Grub4K](https://github.com/Grub4K)
* [extractor/detik] Generalize extractors by [HobbyistDev](https://github.com/HobbyistDev), [coletdjnz](https://github.com/coletdjnz)
* [extractor/dplay:italy] Add default authentication by [Timendum](https://github.com/Timendum)
* [extractor/heise] Fix extractor by [coletdjnz](https://github.com/coletdjnz)
* [extractor/holodex] Fix `_VALID_URL` by [LiviaMedeiros](https://github.com/LiviaMedeiros)
* [extractor/hrfensehen] Fix extractor by [snapdgn](https://github.com/snapdgn)
* [extractor/hungama] Add subtitle by [GautamMKGarg](https://github.com/GautamMKGarg), [pukkandan](https://github.com/pukkandan)
* [extractor/instagram] Extract more metadata by [pritam20ps05](https://github.com/pritam20ps05)
* [extractor/JWPlatform] Fix extractor by [coletdjnz](https://github.com/coletdjnz)
* [extractor/malltv] Fix video_id extraction by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/MLBTV] Detect live streams
* [extractor/motorsport] Support native embeds
* [extractor/Mxplayer] Fix extractor by [itachi-19](https://github.com/itachi-19)
* [extractor/nebula] Add nebula.tv by [tannertechnology](https://github.com/tannertechnology)
* [extractor/nfl] Fix extractor by [bashonly](https://github.com/bashonly)
* [extractor/ondemandkorea] Update `jw_config` regex by [julien-hadleyjack](https://github.com/julien-hadleyjack)
* [extractor/paramountplus] Better DRM detection by [bashonly](https://github.com/bashonly)
* [extractor/patreon] Sort formats
* [extractor/rcs] Fix embed extraction by [coletdjnz](https://github.com/coletdjnz)
* [extractor/redgifs] Fix extractor by [jhwgh1968](https://github.com/jhwgh1968)
* [extractor/rutube] Fix `_EMBED_REGEX` by [coletdjnz](https://github.com/coletdjnz)
* [extractor/RUTV] Fix warnings for livestreams by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/soundcloud:search] More metadata in `--flat-playlist` by [SuperSonicHub1](https://github.com/SuperSonicHub1)
* [extractor/telegraaf] Use mobile GraphQL API endpoint by [coletdjnz](https://github.com/coletdjnz)
* [extractor/tennistv] Fix timestamp by [zenerdi0de](https://github.com/zenerdi0de)
* [extractor/tiktok] Fix TikTokIE by [bashonly](https://github.com/bashonly)
* [extractor/triller] Fix auth token by [bashonly](https://github.com/bashonly)
* [extractor/trovo] Fix extractors by [Mehavoid](https://github.com/Mehavoid)
* [extractor/tv2] Support new url format by [tobi1805](https://github.com/tobi1805)
* [extractor/web.archive:youtube] Fix `_YT_INITIAL_PLAYER_RESPONSE_RE`
* [extractor/wistia] Add support for channels by [coletdjnz](https://github.com/coletdjnz)
* [extractor/wistia] Match IDs in embed URLs by [bashonly](https://github.com/bashonly)
* [extractor/wordpress:playlist] Add generic embed extractor by [coletdjnz](https://github.com/coletdjnz)
* [extractor/yandexvideopreview] Update `_VALID_URL` by [Grub4K](https://github.com/Grub4K)
* [extractor/zee5] Fix `_VALID_URL` by [m4tu4g](https://github.com/m4tu4g)
* [extractor/zee5] Generate device ids by [freezboltz](https://github.com/freezboltz)

### 2022.09.01

* Add option `--use-extractors`
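    A sketch of the new option (available extractor names can be listed with `--list-extractors`; the URL is a placeholder):

    ```sh
    # Only try the named extractors, in this order
    yt-dlp --use-extractors "youtube,generic" "https://example.com/VIDEO"
    ```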
* Merge youtube-dl: Upto [commit/ed5c44e](https://github.com/ytdl-org/youtube-dl/commit/ed5c44e7)
* Add yt-dlp version to infojson
* Fix `--break-per-url --max-downloads`
* Fix bug in `--alias`
* [cookies] Support firefox container in `--cookies-from-browser` by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
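    With container support, the browser specification gains an optional container part; a sketch (the profile and container names are placeholders):

    ```sh
    # BROWSER[+KEYRING][:PROFILE][::CONTAINER]
    yt-dlp --cookies-from-browser "firefox:default-release::Personal" "https://example.com/VIDEO"
    ```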
* [downloader/external] Smarter detection of executable
* [extractor/generic] Don't return JW player without formats
* [FormatSort] Fix `aext` for `--prefer-free-formats`
* [jsinterp] Various improvements by [pukkandan](https://github.com/pukkandan), [dirkf](https://github.com/dirkf), [elyse0](https://github.com/elyse0)
* [cache] Mechanism to invalidate old cache
* [utils] Add `deprecation_warning`
* [utils] Add `orderedSet_from_options`
* [utils] `Popen`: Restore `LD_LIBRARY_PATH` when using PyInstaller by [Lesmiscore](https://github.com/Lesmiscore)
* [build] `make tar` should not follow `DESTDIR` by [satan1st](https://github.com/satan1st)
* [build] Update pyinstaller by [shirt-dev](https://github.com/shirt-dev)
* [test] Fix `test_youtube_signature`
* [cleanup] Misc fixes and cleanup by [DavidH-2022](https://github.com/DavidH-2022), [MrRawes](https://github.com/MrRawes), [pukkandan](https://github.com/pukkandan)
* [extractor/epoch] Add extractor by [tejasa97](https://github.com/tejasa97)
* [extractor/eurosport] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/IslamChannel] Add extractors by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/newspicks] Add extractor by [Lesmiscore](https://github.com/Lesmiscore)
* [extractor/triller] Add extractor by [bashonly](https://github.com/bashonly)
* [extractor/VQQ] Add extractors by [elyse0](https://github.com/elyse0)
* [extractor/youtube] Improvements to nsig extraction
* [extractor/youtube] Fix bug in format sorting
* [extractor/youtube] Update iOS Innertube clients by [SamantazFox](https://github.com/SamantazFox)
* [extractor/youtube] Use device-specific user agent by [coletdjnz](https://github.com/coletdjnz)
* [extractor/youtube] Add `--compat-option no-youtube-prefer-utc-upload-date` by [coletdjnz](https://github.com/coletdjnz)
* [extractor/arte] Bug fix by [cgrigis](https://github.com/cgrigis)
* [extractor/bilibili] Extract `flac` with premium account by [jackyyf](https://github.com/jackyyf)
* [extractor/BiliBiliSearch] Don't sort by date
* [extractor/BiliBiliSearch] Fix infinite loop
* [extractor/bitchute] Mark errors as expected
* [extractor/crunchyroll:beta] Use anonymous access by [tejing1](https://github.com/tejing1)
* [extractor/huya] Fix stream extraction by [ohaiibuzzle](https://github.com/ohaiibuzzle)
* [extractor/medaltv] Fix extraction by [xenova](https://github.com/xenova)
* [extractor/mediaset] Fix embed extraction
* [extractor/mixcloud] All formats are audio-only
* [extractor/rtbf] Fix jwt extraction by [elyse0](https://github.com/elyse0)
* [extractor/screencastomatic] Support `--video-password` by [shreyasminocha](https://github.com/shreyasminocha)
* [extractor/stripchat] Don't modify input URL by [dfaker](https://github.com/dfaker)
* [extractor/uktv] Improve `_VALID_URL` by [dirkf](https://github.com/dirkf)
* [extractor/vimeo:user] Fix `_VALID_URL`

### 2022.08.19

* Fix bug in `--download-archive`
* [jsinterp] **Fix for new youtube players** and related improvements by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
* [phantomjs] Add function to execute JS without a DOM by [MinePlayersPE](https://github.com/MinePlayersPE), [pukkandan](https://github.com/pukkandan)
* [build] Exclude devscripts from installs by [Lesmiscore](https://github.com/Lesmiscore)
* [cleanup] Misc fixes and cleanup
* [extractor/youtube] **Add fallback to phantomjs** for nsig
* [extractor/youtube] Fix error reporting of "Incomplete data"
* [extractor/youtube] Improve format sorting for iOS formats
* [extractor/youtube] Improve signature caching
* [extractor/instagram] Fix extraction by [bashonly](https://github.com/bashonly), [pritam20ps05](https://github.com/pritam20ps05)
* [extractor/rai] Minor fix by [nixxo](https://github.com/nixxo)
* [extractor/rtbf] Fix stream extractor by [elyse0](https://github.com/elyse0)
* [extractor/SovietsCloset] Fix extractor by [ChillingPepper](https://github.com/ChillingPepper)
* [extractor/zattoo] Fix Zattoo resellers by [goggle](https://github.com/goggle)

**Changelog.md**

```diff
@@ -19,8 +605,7 @@ ### 2022.08.14
 * [extractor] Fix format sorting of `channels`
 * [ffmpeg] Disable avconv unless `--prefer-avconv`
 * [ffmpeg] Smarter detection of ffprobe filename
-* [patreon] Ignore erroneous media attachments by [coletdjnz](https://github.com/coletdjnz)
-* [embedthumbnail] Detect `libatomicparsley.so`
+* [postprocessor/embedthumbnail] Detect `libatomicparsley.so`
 * [ThumbnailsConvertor] Fix conversion after `fixup_webp`
 * [utils] Fix `get_compatible_ext`
 * [build] Fix changelog
@@ -30,6 +615,7 @@ ### 2022.08.14
 * [cleanup] Misc fixes and cleanup
 * [extractor/moview] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
 * [extractor/parler] Add extractor by [palewire](https://github.com/palewire)
+* [extractor/patreon] Ignore erroneous media attachments by [coletdjnz](https://github.com/coletdjnz)
 * [extractor/truth] Add extractor by [palewire](https://github.com/palewire)
 * [extractor/aenetworks] Add formats parameter by [jacobtruman](https://github.com/jacobtruman)
 * [extractor/crunchyroll] Improve `_VALID_URL`s
```

**Collaborators.md**

```diff
@@ -8,6 +8,7 @@ # Collaborators
 ## [pukkandan](https://github.com/pukkandan)
 
 [](https://ko-fi.com/pukkandan)
+[](https://github.com/sponsors/pukkandan)
 
 * Owner of the fork
@@ -25,8 +26,9 @@ ## [shirt](https://github.com/shirt-dev)
 
 ## [coletdjnz](https://github.com/coletdjnz)
 
 [](https://github.com/sponsors/coletdjnz)
 
+* Improved plugin architecture
 * YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
 * Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
 * Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc
@@ -42,7 +44,7 @@ ## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>
 * Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc
 
 
-## [Lesmiscore](https://github.com/Lesmiscore) (nao20010128nao)
+## [Lesmiscore](https://github.com/Lesmiscore) <sub><sup>(nao20010128nao)</sup></sub>
 
 **Bitcoin**: bc1qfd02r007cutfdjwjmyy9w23rjvtls6ncve7r3s
 **Monacoin**: mona1q3tf7dzvshrhfe3md379xtvt2n22duhglv5dskr
@@ -50,3 +52,20 @@ ## [Lesmiscore](https://github.com/Lesmiscore) (nao20010128nao)
 * Download live from start to end for YouTube
 * Added support for new websites AbemaTV, mildom, PixivSketch, skeb, radiko, voicy, mirrativ, openrec, whowatch, damtomo, 17.live, mixch etc
 * Improved/fixed support for fc2, YahooJapanNews, tver, iwara etc
+
+
+## [bashonly](https://github.com/bashonly)
+
+* `--update-to`, automated release, nightly builds
+* `--cookies-from-browser` support for Firefox containers
+* Added support for new websites Genius, Kick, NBCStations, Triller, VideoKen etc
+* Improved/fixed support for Anvato, Brightcove, Instagram, ParamountPlus, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc
+
+
+## [Grub4K](https://github.com/Grub4K)
+
+[](https://ko-fi.com/Grub4K) [](https://github.com/sponsors/Grub4K)
+
+* `--update-to`, automated release, nightly builds
+* Rework internals like `traverse_obj`, various core refactors and bugs fixes
+* Helped fix crunchyroll, Twitter, wrestleuniverse, wistia, slideslive etc
```
14  Makefile
@@ -17,8 +17,8 @@ pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
 clean-test:
 	rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
 	*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
-	*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.jpeg *.jpg *.m4a *.mpga *.m4v *.mhtml *.mkv *.mov \
-	*.mp3 *.mp4 *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
+	*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 \
+	*.mp4 *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
 clean-dist:
 	rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
 	yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
@@ -33,7 +33,6 @@ completion-zsh: completions/zsh/_yt-dlp
 lazy-extractors: yt_dlp/extractor/lazy_extractors.py

 PREFIX ?= /usr/local
-DESTDIR ?= .
 BINDIR ?= $(PREFIX)/bin
 MANDIR ?= $(PREFIX)/man
 SHAREDIR ?= $(PREFIX)/share
@@ -75,17 +74,16 @@ offlinetest: codetest
 	$(PYTHON) -m pytest -k "not download"

 # XXX: This is hard to maintain
-CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat \
-	yt_dlp/extractor/anvato_token_generator
+CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat yt_dlp/dependencies
 yt-dlp: yt_dlp/*.py yt_dlp/*/*.py
 	mkdir -p zip
 	for d in $(CODE_FOLDERS) ; do \
 	  mkdir -p zip/$$d ;\
 	  cp -pPR $$d/*.py zip/$$d/ ;\
 	done
-	touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py zip/yt_dlp/*/*/*.py
+	touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py
 	mv zip/yt_dlp/__main__.py zip/
-	cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py yt_dlp/*/*/*.py __main__.py
+	cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py __main__.py
 	rm -rf zip
 	echo '#!$(PYTHON)' > yt-dlp
 	cat yt-dlp.zip >> yt-dlp
@@ -134,7 +132,7 @@ yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscrip
 	$(PYTHON) devscripts/make_lazy_extractors.py $@

 yt-dlp.tar.gz: all
-	@tar -czf $(DESTDIR)/yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
+	@tar -czf yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
 		--exclude '*.DS_Store' \
 		--exclude '*.kate-swp' \
 		--exclude '*.pyc' \
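Aside: the `yt-dlp` Makefile target above builds a self-executing archive by hand (stage the packages, zip them together with `__main__.py`, prepend a shebang). A minimal stdlib-only sketch of the same idea, not from the repo:

import zipapp

# Build a shebang-prefixed executable zip from a staged directory, roughly
# what the recipe does with mkdir/cp/zip/echo/cat (minus the reproducible
# `touch -t` timestamping).
zipapp.create_archive(
    'zip',                               # staging dir containing yt_dlp/ and __main__.py
    target='yt-dlp',
    interpreter='/usr/bin/env python3',  # written as the '#!' line
)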
12  devscripts/changelog_override.json  (new file)
[
    {
        "action": "add",
        "when": "2023.02.17",
        "short": "[priority] **A new release type has been added!**\n * [`nightly`](https://github.com/yt-dlp/yt-dlp/releases/tag/nightly) builds will be made after each push, containing the latest fixes (but also possibly bugs).\n * When using `--update`/`-U`, a release binary will only update to its current channel (either `stable` or `nightly`).\n * The `--update-to` option has been added allowing the user more control over program upgrades (or downgrades).\n * `--update-to` can change the release channel (`stable`, `nightly`) and also upgrade or downgrade to specific tags.\n * **Usage**: `--update-to CHANNEL`, `--update-to TAG`, `--update-to CHANNEL@TAG`"
    },
    {
        "action": "add",
        "when": "2023.02.17",
        "short": "[priority] **YouTube throttling fixes!**"
    }
]
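For orientation, a small sketch (not from the repo) of how such an override file is consumed; the real logic is `CommitRange.apply_overrides` in `devscripts/make_changelog.py`, shown further below:

import json

with open('devscripts/changelog_override.json') as file:
    overrides = json.load(file)

for override in overrides:
    # Each entry carries an "action" ("add", "remove" or "change") plus a
    # "when" (release date or commit hash) gating whether it applies.
    print(override['action'], override.get('when'), override.get('short', ''))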
96  devscripts/changelog_override.schema.json  (new file)
{
    "$schema": "http://json-schema.org/draft/2020-12/schema",
    "type": "array",
    "uniqueItems": true,
    "items": {
        "type": "object",
        "oneOf": [
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "add"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    },
                    "short": {
                        "type": "string"
                    },
                    "authors": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    }
                },
                "required": [
                    "action",
                    "short"
                ]
            },
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "remove"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    }
                },
                "required": [
                    "action",
                    "hash"
                ]
            },
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "change"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    },
                    "short": {
                        "type": "string"
                    },
                    "authors": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    }
                },
                "required": [
                    "action",
                    "hash",
                    "short",
                    "authors"
                ]
            }
        ]
    }
}
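To sanity-check an override file against this schema locally, something like the following works, assuming the third-party jsonschema package (not a dependency of the repo itself); paths are the ones from the diff above:

import json

import jsonschema  # pip install jsonschema

with open('devscripts/changelog_override.schema.json') as f:
    schema = json.load(f)
with open('devscripts/changelog_override.json') as f:
    instance = json.load(f)

# Raises jsonschema.ValidationError if an entry matches no "oneOf" variant
jsonschema.validate(instance=instance, schema=schema)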
@@ -10,15 +10,18 @@
 )

 # These bloat the lazy_extractors, so allow them to passthrough silently
-ALLOWED_CLASSMETHODS = {'get_testcases', 'extract_from_webpage'}
+ALLOWED_CLASSMETHODS = {'extract_from_webpage', 'get_testcases', 'get_webpage_testcases'}
+_WARNED = False


 class LazyLoadMetaClass(type):
     def __getattr__(cls, name):
-        if '_real_class' not in cls.__dict__ and name not in ALLOWED_CLASSMETHODS:
-            write_string(
-                'WARNING: Falling back to normal extractor since lazy extractor '
-                f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
+        global _WARNED
+        if ('_real_class' not in cls.__dict__
+                and name not in ALLOWED_CLASSMETHODS and not _WARNED):
+            _WARNED = True
+            write_string('WARNING: Falling back to normal extractor since lazy extractor '
+                         f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
         return getattr(cls.real_class, name)
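The change above makes the fallback warning fire only once per run via a module-level flag; a standalone sketch of the pattern (not from the repo):

_WARNED = False

def warn_once(message):
    # Emit the warning only the first time; later calls are silent
    global _WARNED
    if not _WARNED:
        _WARNED = True
        print(f'WARNING: {message}')

warn_once('falling back to normal extractor')  # printed
warn_once('falling back to normal extractor')  # suppressed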
493  devscripts/make_changelog.py  (new file)
from __future__ import annotations

import enum
import itertools
import json
import logging
import re
import subprocess
import sys
from collections import defaultdict
from dataclasses import dataclass
from functools import lru_cache
from pathlib import Path

BASE_URL = 'https://github.com'
LOCATION_PATH = Path(__file__).parent

logger = logging.getLogger(__name__)


class CommitGroup(enum.Enum):
    UPSTREAM = None
    PRIORITY = 'Important'
    CORE = 'Core'
    EXTRACTOR = 'Extractor'
    DOWNLOADER = 'Downloader'
    POSTPROCESSOR = 'Postprocessor'
    MISC = 'Misc.'

    @classmethod
    @lru_cache
    def commit_lookup(cls):
        return {
            name: group
            for group, names in {
                cls.PRIORITY: {''},
                cls.UPSTREAM: {'upstream'},
                cls.CORE: {
                    'aes',
                    'cache',
                    'compat_utils',
                    'compat',
                    'cookies',
                    'core',
                    'dependencies',
                    'jsinterp',
                    'outtmpl',
                    'plugins',
                    'update',
                    'utils',
                },
                cls.MISC: {
                    'build',
                    'cleanup',
                    'devscripts',
                    'docs',
                    'misc',
                    'test',
                },
                cls.EXTRACTOR: {'extractor', 'extractors'},
                cls.DOWNLOADER: {'downloader'},
                cls.POSTPROCESSOR: {'postprocessor'},
            }.items()
            for name in names
        }

    @classmethod
    def get(cls, value):
        result = cls.commit_lookup().get(value)
        if result:
            logger.debug(f'Mapped {value!r} => {result.name}')
        return result


@dataclass
class Commit:
    hash: str | None
    short: str
    authors: list[str]

    def __str__(self):
        result = f'{self.short!r}'

        if self.hash:
            result += f' ({self.hash[:7]})'

        if self.authors:
            authors = ', '.join(self.authors)
            result += f' by {authors}'

        return result


@dataclass
class CommitInfo:
    details: str | None
    sub_details: tuple[str, ...]
    message: str
    issues: list[str]
    commit: Commit
    fixes: list[Commit]

    def key(self):
        return ((self.details or '').lower(), self.sub_details, self.message)


class Changelog:
    MISC_RE = re.compile(r'(?:^|\b)(?:lint(?:ing)?|misc|format(?:ting)?|fixes)(?:\b|$)', re.IGNORECASE)

    def __init__(self, groups, repo):
        self._groups = groups
        self._repo = repo

    def __str__(self):
        return '\n'.join(self._format_groups(self._groups)).replace('\t', ' ')

    def _format_groups(self, groups):
        for item in CommitGroup:
            group = groups[item]
            if group:
                yield self.format_module(item.value, group)

    def format_module(self, name, group):
        result = f'\n#### {name} changes\n' if name else '\n'
        return result + '\n'.join(self._format_group(group))

    def _format_group(self, group):
        sorted_group = sorted(group, key=CommitInfo.key)
        detail_groups = itertools.groupby(sorted_group, lambda item: (item.details or '').lower())
        for _, items in detail_groups:
            items = list(items)
            details = items[0].details
            if not details:
                indent = ''
            else:
                yield f'- {details}'
                indent = '\t'

            if details == 'cleanup':
                items, cleanup_misc_items = self._filter_cleanup_misc_items(items)

            sub_detail_groups = itertools.groupby(items, lambda item: tuple(map(str.lower, item.sub_details)))
            for sub_details, entries in sub_detail_groups:
                if not sub_details:
                    for entry in entries:
                        yield f'{indent}- {self.format_single_change(entry)}'
                    continue

                entries = list(entries)
                prefix = f'{indent}- {", ".join(entries[0].sub_details)}'
                if len(entries) == 1:
                    yield f'{prefix}: {self.format_single_change(entries[0])}'
                    continue

                yield prefix
                for entry in entries:
                    yield f'{indent}\t- {self.format_single_change(entry)}'

            if details == 'cleanup' and cleanup_misc_items:
                yield from self._format_cleanup_misc_sub_group(cleanup_misc_items)

    def _filter_cleanup_misc_items(self, items):
        cleanup_misc_items = defaultdict(list)
        non_misc_items = []
        for item in items:
            if self.MISC_RE.search(item.message):
                cleanup_misc_items[tuple(item.commit.authors)].append(item)
            else:
                non_misc_items.append(item)

        return non_misc_items, cleanup_misc_items

    def _format_cleanup_misc_sub_group(self, group):
        prefix = '\t- Miscellaneous'
        if len(group) == 1:
            yield f'{prefix}: {next(self._format_cleanup_misc_items(group))}'
            return

        yield prefix
        for message in self._format_cleanup_misc_items(group):
            yield f'\t\t- {message}'

    def _format_cleanup_misc_items(self, group):
        for authors, infos in group.items():
            message = ', '.join(
                self._format_message_link(None, info.commit.hash)
                for info in sorted(infos, key=lambda item: item.commit.hash or ''))
            yield f'{message} by {self._format_authors(authors)}'

    def format_single_change(self, info):
        message = self._format_message_link(info.message, info.commit.hash)
        if info.issues:
            message = f'{message} ({self._format_issues(info.issues)})'

        if info.commit.authors:
            message = f'{message} by {self._format_authors(info.commit.authors)}'

        if info.fixes:
            fix_message = ', '.join(f'{self._format_message_link(None, fix.hash)}' for fix in info.fixes)

            authors = sorted({author for fix in info.fixes for author in fix.authors}, key=str.casefold)
            if authors != info.commit.authors:
                fix_message = f'{fix_message} by {self._format_authors(authors)}'

            message = f'{message} (With fixes in {fix_message})'

        return message

    def _format_message_link(self, message, hash):
        assert message or hash, 'Improperly defined commit message or override'
        message = message if message else hash[:7]
        return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message

    def _format_issues(self, issues):
        return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)

    @staticmethod
    def _format_authors(authors):
        return ', '.join(f'[{author}]({BASE_URL}/{author})' for author in authors)

    @property
    def repo_url(self):
        return f'{BASE_URL}/{self._repo}'


class CommitRange:
    COMMAND = 'git'
    COMMIT_SEPARATOR = '-----'

    AUTHOR_INDICATOR_RE = re.compile(r'Authored by:? ', re.IGNORECASE)
    MESSAGE_RE = re.compile(r'''
        (?:\[
            (?P<prefix>[^\]\/:,]+)
            (?:/(?P<details>[^\]:,]+))?
            (?:[:,](?P<sub_details>[^\]]+))?
        \]\ )?
        (?:(?P<sub_details_alt>`?[^:`]+`?): )?
        (?P<message>.+?)
        (?:\ \((?P<issues>\#\d+(?:,\ \#\d+)*)\))?
        ''', re.VERBOSE | re.DOTALL)
    EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
    FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert)\s+([\da-f]{40})')
    UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')

    def __init__(self, start, end, default_author=None) -> None:
        self._start = start
        self._end = end
        self._commits, self._fixes = self._get_commits_and_fixes(default_author)
        self._commits_added = []

    @classmethod
    def from_single(cls, commitish='HEAD', default_author=None):
        start_commitish = cls.get_prev_tag(commitish)
        end_commitish = cls.get_next_tag(commitish)
        if start_commitish == end_commitish:
            start_commitish = cls.get_prev_tag(f'{commitish}~')
        logger.info(f'Determined range from {commitish!r}: {start_commitish}..{end_commitish}')
        return cls(start_commitish, end_commitish, default_author)

    @classmethod
    def get_prev_tag(cls, commitish):
        command = [cls.COMMAND, 'describe', '--tags', '--abbrev=0', '--exclude=*[^0-9.]*', commitish]
        return subprocess.check_output(command, text=True).strip()

    @classmethod
    def get_next_tag(cls, commitish):
        result = subprocess.run(
            [cls.COMMAND, 'describe', '--contains', '--abbrev=0', commitish],
            stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, text=True)
        if result.returncode:
            return 'HEAD'

        return result.stdout.partition('~')[0].strip()

    def __iter__(self):
        return iter(itertools.chain(self._commits.values(), self._commits_added))

    def __len__(self):
        return len(self._commits) + len(self._commits_added)

    def __contains__(self, commit):
        if isinstance(commit, Commit):
            if not commit.hash:
                return False
            commit = commit.hash

        return commit in self._commits

    def _is_ancestor(self, commitish):
        return bool(subprocess.call(
            [self.COMMAND, 'merge-base', '--is-ancestor', commitish, self._start]))

    def _get_commits_and_fixes(self, default_author):
        result = subprocess.check_output([
            self.COMMAND, 'log', f'--format=%H%n%s%n%b%n{self.COMMIT_SEPARATOR}',
            f'{self._start}..{self._end}'], text=True)

        commits = {}
        fixes = defaultdict(list)
        lines = iter(result.splitlines(False))
        for line in lines:
            commit_hash = line
            short = next(lines)
            skip = short.startswith('Release ') or short == '[version] update'

            authors = [default_author] if default_author else []
            for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
                match = self.AUTHOR_INDICATOR_RE.match(line)
                if match:
                    authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)

            commit = Commit(commit_hash, short, authors)
            if skip:
                logger.debug(f'Skipped commit: {commit}')
                continue

            fix_match = self.FIXES_RE.search(commit.short)
            if fix_match:
                commitish = fix_match.group(1)
                fixes[commitish].append(commit)

            commits[commit.hash] = commit

        for commitish, fix_commits in fixes.items():
            if commitish in commits:
                hashes = ', '.join(commit.hash[:7] for commit in fix_commits)
                logger.info(f'Found fix(es) for {commitish[:7]}: {hashes}')
                for fix_commit in fix_commits:
                    del commits[fix_commit.hash]
            else:
                logger.debug(f'Commit with fixes not in changes: {commitish[:7]}')

        return commits, fixes

    def apply_overrides(self, overrides):
        for override in overrides:
            when = override.get('when')
            if when and when not in self and when != self._start:
                logger.debug(f'Ignored {when!r}, not in commits {self._start!r}')
                continue

            override_hash = override.get('hash')
            if override['action'] == 'add':
                commit = Commit(override.get('hash'), override['short'], override.get('authors') or [])
                logger.info(f'ADD {commit}')
                self._commits_added.append(commit)

            elif override['action'] == 'remove':
                if override_hash in self._commits:
                    logger.info(f'REMOVE {self._commits[override_hash]}')
                    del self._commits[override_hash]

            elif override['action'] == 'change':
                if override_hash not in self._commits:
                    continue
                commit = Commit(override_hash, override['short'], override['authors'])
                logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
                self._commits[commit.hash] = commit

        self._commits = {key: value for key, value in reversed(self._commits.items())}

    def groups(self):
        groups = defaultdict(list)
        for commit in self:
            upstream_re = self.UPSTREAM_MERGE_RE.match(commit.short)
            if upstream_re:
                commit.short = f'[upstream] Merge up to youtube-dl {upstream_re.group(1)}'

            match = self.MESSAGE_RE.fullmatch(commit.short)
            if not match:
                logger.error(f'Error parsing short commit message: {commit.short!r}')
                continue

            prefix, details, sub_details, sub_details_alt, message, issues = match.groups()
            group = None
            if prefix:
                if prefix == 'priority':
                    prefix, _, details = (details or '').partition('/')
                    logger.debug(f'Priority: {message!r}')
                    group = CommitGroup.PRIORITY

                if not details and prefix:
                    if prefix not in ('core', 'downloader', 'extractor', 'misc', 'postprocessor', 'upstream'):
                        logger.debug(f'Replaced details with {prefix!r}')
                        details = prefix or None

                if details == 'common':
                    details = None

                if details:
                    details = details.strip()

            else:
                group = CommitGroup.CORE

            sub_details = f'{sub_details or ""},{sub_details_alt or ""}'.replace(':', ',')
            sub_details = tuple(filter(None, map(str.strip, sub_details.split(','))))

            issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []

            if not group:
                group = CommitGroup.get(prefix.lower())
                if not group:
                    if self.EXTRACTOR_INDICATOR_RE.search(commit.short):
                        group = CommitGroup.EXTRACTOR
                    else:
                        group = CommitGroup.POSTPROCESSOR
                    logger.warning(f'Failed to map {commit.short!r}, selected {group.name}')

            commit_info = CommitInfo(
                details, sub_details, message.strip(),
                issues, commit, self._fixes[commit.hash])
            logger.debug(f'Resolved {commit.short!r} to {commit_info!r}')
            groups[group].append(commit_info)

        return groups


def get_new_contributors(contributors_path, commits):
    contributors = set()
    if contributors_path.exists():
        with contributors_path.open() as file:
            for line in filter(None, map(str.strip, file)):
                author, _, _ = line.partition(' (')
                authors = author.split('/')
                contributors.update(map(str.casefold, authors))

    new_contributors = set()
    for commit in commits:
        for author in commit.authors:
            author_folded = author.casefold()
            if author_folded not in contributors:
                contributors.add(author_folded)
                new_contributors.add(author)

    return sorted(new_contributors, key=str.casefold)


if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Create a changelog markdown from a git commit range')
    parser.add_argument(
        'commitish', default='HEAD', nargs='?',
        help='The commitish to create the range from (default: %(default)s)')
    parser.add_argument(
        '-v', '--verbosity', action='count', default=0,
        help='increase verbosity (can be used twice)')
    parser.add_argument(
        '-c', '--contributors', action='store_true',
        help='update CONTRIBUTORS file (default: %(default)s)')
    parser.add_argument(
        '--contributors-path', type=Path, default=LOCATION_PATH.parent / 'CONTRIBUTORS',
        help='path to the CONTRIBUTORS file')
    parser.add_argument(
        '--no-override', action='store_true',
        help='skip override json in commit generation (default: %(default)s)')
    parser.add_argument(
        '--override-path', type=Path, default=LOCATION_PATH / 'changelog_override.json',
        help='path to the changelog_override.json file')
    parser.add_argument(
        '--default-author', default='pukkandan',
        help='the author to use without an author indicator (default: %(default)s)')
    parser.add_argument(
        '--repo', default='yt-dlp/yt-dlp',
        help='the github repository to use for the operations (default: %(default)s)')
    args = parser.parse_args()

    logging.basicConfig(
        datefmt='%Y-%m-%d %H-%M-%S', format='{asctime} | {levelname:<8} | {message}',
        level=logging.WARNING - 10 * args.verbosity, style='{', stream=sys.stderr)

    commits = CommitRange.from_single(args.commitish, args.default_author)

    if not args.no_override:
        if args.override_path.exists():
            with args.override_path.open() as file:
                overrides = json.load(file)
            commits.apply_overrides(overrides)
        else:
            logger.warning(f'File {args.override_path.as_posix()} does not exist')

    logger.info(f'Loaded {len(commits)} commits')

    new_contributors = get_new_contributors(args.contributors_path, commits)
    if new_contributors:
        if args.contributors:
            with args.contributors_path.open('a') as file:
                file.writelines(f'{contributor}\n' for contributor in new_contributors)
        logger.info(f'New contributors: {", ".join(new_contributors)}')

    print(Changelog(commits.groups(), args.repo))
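As an aside, a runnable illustration (not from the repo) of how `MESSAGE_RE` decomposes a conventional commit title; the sample message is invented:

import re

MESSAGE_RE = re.compile(r'''
    (?:\[
        (?P<prefix>[^\]\/:,]+)
        (?:/(?P<details>[^\]:,]+))?
        (?:[:,](?P<sub_details>[^\]]+))?
    \]\ )?
    (?:(?P<sub_details_alt>`?[^:`]+`?): )?
    (?P<message>.+?)
    (?:\ \((?P<issues>\#\d+(?:,\ \#\d+)*)\))?
    ''', re.VERBOSE | re.DOTALL)

match = MESSAGE_RE.fullmatch('[extractor/youtube] Fix throttling (#1234)')
print(match.group('prefix', 'details', 'message', 'issues'))
# ('extractor', 'youtube', 'Fix throttling', '#1234')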
@@ -24,6 +24,8 @@
     options:
       - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
         required: true
+      - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+        required: false
       - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
         required: true
   - type: textarea
@@ -58,7 +60,7 @@
     label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
     description: Fill all fields even if you think it is irrelevant for the issue
     options:
-      - label: I understand that I will be **blocked** if I remove or skip any mandatory\\* field
+      - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\\* field
         required: true
'''.strip()
@@ -2,6 +2,7 @@

 # Allow direct execution
 import os
+import shutil
 import sys

 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -12,9 +13,18 @@
 from devscripts.utils import get_filename_args, read_file, write_file

 NO_ATTR = object()
-STATIC_CLASS_PROPERTIES = ['IE_NAME', 'IE_DESC', 'SEARCH_KEY', '_VALID_URL', '_WORKING', '_NETRC_MACHINE', 'age_limit']
+STATIC_CLASS_PROPERTIES = [
+    'IE_NAME', '_ENABLED', '_VALID_URL',  # Used for URL matching
+    '_WORKING', 'IE_DESC', '_NETRC_MACHINE', 'SEARCH_KEY',  # Used for --extractor-descriptions
+    'age_limit',  # Used for --age-limit (evaluated)
+    '_RETURN_TYPE',  # Accessed in CLI only with instance (evaluated)
+]
 CLASS_METHODS = [
-    'ie_key', 'working', 'description', 'suitable', '_match_valid_url', '_match_id', 'get_temp_id', 'is_suitable'
+    'ie_key', 'suitable', '_match_valid_url',  # Used for URL matching
+    'working', 'get_temp_id', '_match_id',  # Accessed just before instance creation
+    'description',  # Used for --extractor-descriptions
+    'is_suitable',  # Used for --age-limit
+    'supports_login', 'is_single_video',  # Accessed in CLI only with instance
 ]
 IE_TEMPLATE = '''
 class {name}({bases}):
@@ -30,8 +40,12 @@ def main():

     _ALL_CLASSES = get_all_ies()  # Must be before import

+    import yt_dlp.plugins
     from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor

+    # Filter out plugins
+    _ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
+
     DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
     module_src = '\n'.join((
         MODULE_TEMPLATE,
@@ -48,12 +62,13 @@ def get_all_ies():
     PLUGINS_DIRNAME = 'ytdlp_plugins'
     BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
     if os.path.exists(PLUGINS_DIRNAME):
-        os.rename(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
+        # os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
+        shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
     try:
         from yt_dlp.extractor.extractors import _ALL_CLASSES
     finally:
         if os.path.exists(BLOCKED_DIRNAME):
-            os.rename(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
+            shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
     return _ALL_CLASSES
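The switch from os.rename to shutil.move above matters when the two paths sit on different filesystems (the Docker case referenced in the comment); a standalone sketch of the distinction, not from the repo:

import os
import shutil

def block_dir(src='ytdlp_plugins', dst='ytdlp_plugins_blocked'):
    # os.rename is atomic but raises OSError (EXDEV) when src and dst live on
    # different filesystems (e.g. Docker bind mounts); shutil.move falls back
    # to copy-and-delete in that case, which is why the script uses it.
    if os.path.exists(src):
        shutil.move(src, dst)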
@@ -45,33 +45,43 @@ def apply_patch(text, patch):
 delim = f'\n{" " * switch_col_width}'

 PATCHES = (
-    (  # Standardize update message
+    (  # Standardize `--update` message
        r'(?m)^( -U, --update\s+).+(\n \s.+)*$',
        r'\1Update this program to the latest version',
    ),
    (  # Headings
        r'(?m)^ (\w.+\n)( (?=\w))?',
        r'## \1'
    ),
+    (  # Fixup `--date` formatting
+        rf'(?m)( --date DATE.+({delim}[^\[]+)*)\[.+({delim}.+)*$',
+        (rf'\1[now|today|yesterday][-N[day|week|month|year]].{delim}'
+         f'E.g. "--date today-2weeks" downloads only{delim}'
+         'videos uploaded on the same day two weeks ago'),
+    ),
    (  # Do not split URLs
        rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
        lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n'))
    ),
    (  # Do not split "words"
        rf'(?m)({delim}\S+)+$',
        lambda mobj: ''.join((delim, mobj.group(0).replace(delim, '')))
    ),
    (  # Allow overshooting last line
        rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
        lambda mobj: (mobj.group().replace(delim, ' ')
                      if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
                      else mobj.group())
    ),
    (  # Avoid newline when a space is available b/w switch and description
        DISABLE_PATCH,  # This creates issues with prepare_manpage
        r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
        r'\1 '
    ),
+    (  # Replace brackets with a Markdown link
+        r'SponsorBlock API \((http.+)\)',
+        r'[SponsorBlock API](\1)'
+    ),
 )

 readme = read_file(README_FILE)
@@ -1,13 +1,13 @@
 #!/usr/bin/env sh

-if [ -z $1 ]; then
+if [ -z "$1" ]; then
     test_set='test'
-elif [ $1 = 'core' ]; then
+elif [ "$1" = 'core' ]; then
     test_set="-m not download"
-elif [ $1 = 'download' ]; then
+elif [ "$1" = 'download' ]; then
     test_set="-m download"
 else
-    echo 'Invalid test type "'$1'". Use "core" | "download"'
+    echo 'Invalid test type "'"$1"'". Use "core" | "download"'
     exit 1
 fi
@@ -7,6 +7,7 @@
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


+import argparse
 import contextlib
 import subprocess
 import sys
@@ -15,8 +16,9 @@
 from devscripts.utils import read_version, write_file


-def get_new_version(revision):
-    version = datetime.utcnow().strftime('%Y.%m.%d')
+def get_new_version(version, revision):
+    if not version:
+        version = datetime.utcnow().strftime('%Y.%m.%d')

     if revision:
         assert revision.isdigit(), 'Revision must be a number'
@@ -30,25 +32,41 @@ def get_new_version(revision):

 def get_git_head():
     with contextlib.suppress(Exception):
-        sp = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE)
-        return sp.communicate()[0].decode().strip() or None
+        return subprocess.check_output(['git', 'rev-parse', 'HEAD'], text=True).strip() or None


-VERSION = get_new_version((sys.argv + [''])[1])
-GIT_HEAD = get_git_head()
-
-VERSION_FILE = f'''\
+VERSION_TEMPLATE = '''\
 # Autogenerated by devscripts/update-version.py

-__version__ = {VERSION!r}
+__version__ = {version!r}

-RELEASE_GIT_HEAD = {GIT_HEAD!r}
+RELEASE_GIT_HEAD = {git_head!r}

 VARIANT = None

 UPDATE_HINT = None
+
+CHANNEL = {channel!r}
 '''

-write_file('yt_dlp/version.py', VERSION_FILE)
-print(f'::set-output name=ytdlp_version::{VERSION}')
-print(f'\nVersion = {VERSION}, Git HEAD = {GIT_HEAD}')
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Update the version.py file')
+    parser.add_argument(
+        '-c', '--channel', choices=['stable', 'nightly'], default='stable',
+        help='Select update channel (default: %(default)s)')
+    parser.add_argument(
+        '-o', '--output', default='yt_dlp/version.py',
+        help='The output file to write to (default: %(default)s)')
+    parser.add_argument(
+        'version', nargs='?', default=None,
+        help='A version or revision to use instead of generating one')
+    args = parser.parse_args()
+
+    git_head = get_git_head()
+    version = (
+        args.version if args.version and '.' in args.version
+        else get_new_version(None, args.version))
+    write_file(args.output, VERSION_TEMPLATE.format(
+        version=version, git_head=git_head, channel=args.channel))
+
+    print(f'version={version} ({args.channel}), head={git_head}')
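To see what the rewritten script emits, here is a minimal rendering of VERSION_TEMPLATE; the version, hash and channel values below are illustrative only:

VERSION_TEMPLATE = '''\
# Autogenerated by devscripts/update-version.py

__version__ = {version!r}

RELEASE_GIT_HEAD = {git_head!r}

VARIANT = None

UPDATE_HINT = None

CHANNEL = {channel!r}
'''

# Illustrative values, not a real release
print(VERSION_TEMPLATE.format(
    version='2023.03.03',
    git_head='0000000000000000000000000000000000000000',
    channel='nightly'))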
@@ -7,8 +7,8 @@ def read_file(fname):
         return f.read()


-def write_file(fname, content):
-    with open(fname, 'w', encoding='utf-8') as f:
+def write_file(fname, content, mode='w'):
+    with open(fname, mode, encoding='utf-8') as f:
         return f.write(content)
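The new `mode` parameter lets callers append rather than overwrite; hypothetical usage (the file name is illustrative):

from devscripts.utils import write_file

# mode='a' opens for appending instead of the default 'w' (truncate)
write_file('CONTRIBUTORS', 'new-contributor\n', mode='a')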
29  public.key  (new file)
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBGP78C4BEAD0rF9zjGPAt0thlt5C1ebzccAVX7Nb1v+eqQjk+WEZdTETVCg3
WAM5ngArlHdm/fZqzUgO+pAYrB60GKeg7ffUDf+S0XFKEZdeRLYeAaqqKhSibVal
DjvOBOztu3W607HLETQAqA7wTPuIt2WqmpL60NIcyr27LxqmgdN3mNvZ2iLO+bP0
nKR/C+PgE9H4ytywDa12zMx6PmZCnVOOOu6XZEFmdUxxdQ9fFDqd9LcBKY2LDOcS
Yo1saY0YWiZWHtzVoZu1kOzjnS5Fjq/yBHJLImDH7pNxHm7s/PnaurpmQFtDFruk
t+2lhDnpKUmGr/I/3IHqH/X+9nPoS4uiqQ5HpblB8BK+4WfpaiEg75LnvuOPfZIP
KYyXa/0A7QojMwgOrD88ozT+VCkKkkJ+ijXZ7gHNjmcBaUdKK7fDIEOYI63Lyc6Q
WkGQTigFffSUXWHDCO9aXNhP3ejqFWgGMtCUsrbkcJkWuWY7q5ARy/05HbSM3K4D
U9eqtnxmiV1WQ8nXuI9JgJQRvh5PTkny5LtxqzcmqvWO9TjHBbrs14BPEO9fcXxK
L/CFBbzXDSvvAgArdqqlMoncQ/yicTlfL6qzJ8EKFiqW14QMTdAn6SuuZTodXCTi
InwoT7WjjuFPKKdvfH1GP4bnqdzTnzLxCSDIEtfyfPsIX+9GI7Jkk/zZjQARAQAB
tDdTaW1vbiBTYXdpY2tpICh5dC1kbHAgc2lnbmluZyBrZXkpIDxjb250YWN0QGdy
dWI0ay54eXo+iQJOBBMBCgA4FiEErAy75oSNaoc0ZK9OV89lkztadYEFAmP78C4C
GwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQV89lkztadYEVqQ//cW7TxhXg
7Xbh2EZQzXml0egn6j8QaV9KzGragMiShrlvTO2zXfLXqyizrFP4AspgjSn/4NrI
8mluom+Yi+qr7DXT4BjQqIM9y3AjwZPdywe912Lxcw52NNoPZCm24I9T7ySc8lmR
FQvZC0w4H/VTNj/2lgJ1dwMflpwvNRiWa5YzcFGlCUeDIPskLx9++AJE+xwU3LYm
jQQsPBqpHHiTBEJzMLl+rfd9Fg4N+QNzpFkTDW3EPerLuvJniSBBwZthqxeAtw4M
UiAXh6JvCc2hJkKCoygRfM281MeolvmsGNyQm+axlB0vyldiPP6BnaRgZlx+l6MU
cPqgHblb7RW5j9lfr6OYL7SceBIHNv0CFrt1OnkGo/tVMwcs8LH3Ae4a7UJlIceL
V54aRxSsZU7w4iX+PB79BWkEsQzwKrUuJVOeL4UDwWajp75OFaUqbS/slDDVXvK5
OIeuth3mA/adjdvgjPxhRQjA3l69rRWIJDrqBSHldmRsnX6cvXTDy8wSXZgy51lP
m4IVLHnCy9m4SaGGoAsfTZS0cC9FgjUIyTyrq9M67wOMpUxnuB0aRZgJE1DsI23E
qdvcSNVlO+39xM/KPWUEh6b83wMn88QeW+DCVGWACQq5N3YdPnAJa50617fGbY6I
gXIoRHXkDqe23PZ/jURYCv0sjVtjPoVC+bg=
=bJkn
-----END PGP PUBLIC KEY BLOCK-----
39  pyinst.py
@@ -12,9 +12,8 @@

 from devscripts.utils import read_version

-OS_NAME, MACHINE, ARCH = sys.platform, platform.machine(), platform.architecture()[0][:2]
-if MACHINE in ('x86_64', 'AMD64') or ('i' in MACHINE and '86' in MACHINE):
-    # NB: Windows x86 has MACHINE = AMD64 irrespective of bitness
+OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
+if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
     MACHINE = 'x86' if ARCH == '32' else ''
@@ -38,7 +37,7 @@ def main():
         '--icon=devscripts/logo.ico',
         '--upx-exclude=vcruntime140.dll',
         '--noconfirm',
-        *dependency_options(),
+        '--additional-hooks-dir=yt_dlp/__pyinstaller',
         *opts,
         'yt_dlp/__main__.py',
     ]
@@ -63,7 +62,7 @@ def exe(onedir):
     name = '_'.join(filter(None, (
         'yt-dlp',
         {'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
-        MACHINE
+        MACHINE,
     )))
     return name, ''.join(filter(None, (
         'dist/',
@@ -78,30 +77,6 @@ def version_to_list(version):
     return list(map(int, version_list)) + [0] * (4 - len(version_list))


-def dependency_options():
-    # Due to the current implementation, these are auto-detected, but explicitly add them just in case
-    dependencies = [pycryptodome_module(), 'mutagen', 'brotli', 'certifi', 'websockets']
-    excluded_modules = ['test', 'ytdlp_plugins', 'youtube_dl', 'youtube_dlc']
-
-    yield from (f'--hidden-import={module}' for module in dependencies)
-    yield '--collect-submodules=websockets'
-    yield from (f'--exclude-module={module}' for module in excluded_modules)
-
-
-def pycryptodome_module():
-    try:
-        import Cryptodome  # noqa: F401
-    except ImportError:
-        try:
-            import Crypto  # noqa: F401
-            print('WARNING: Using Crypto since Cryptodome is not available. '
-                  'Install with: pip install pycryptodomex', file=sys.stderr)
-            return 'Crypto'
-        except ImportError:
-            pass
-    return 'Cryptodome'
-
-
 def set_version_info(exe, version):
     if OS_NAME == 'win32':
         windows_set_version(exe, version)
@@ -110,7 +85,6 @@ def set_version_info(exe, version):
 def windows_set_version(exe, version):
     from PyInstaller.utils.win32.versioninfo import (
         FixedFileInfo,
-        SetVersion,
         StringFileInfo,
         StringStruct,
         StringTable,
@@ -119,6 +93,11 @@ def windows_set_version(exe, version):
         VSVersionInfo,
     )

+    try:
+        from PyInstaller.utils.win32.versioninfo import SetVersion
+    except ImportError:  # Pyinstaller >= 5.8
+        from PyInstaller.utils.win32.versioninfo import write_version_info_to_executable as SetVersion
+
     version_list = version_to_list(version)
     suffix = MACHINE and f'_{MACHINE}'
     SetVersion(exe, VSVersionInfo(
5  pyproject.toml  (new file)
[build-system]
build-backend = 'setuptools.build_meta'
# https://github.com/yt-dlp/yt-dlp/issues/5941
# https://github.com/pypa/distutils/issues/17
requires = ['setuptools > 50']
12  setup.cfg
@@ -10,6 +10,14 @@ per_file_ignores =
     devscripts/lazy_load_template.py: F401


+[autoflake]
+ignore-init-module-imports = true
+ignore-pass-after-docstring = true
+remove-all-unused-imports = true
+remove-duplicate-keys = true
+remove-unused-variables = true
+
+
 [tool:pytest]
 addopts = -ra -v --strict-markers
 markers =
@@ -18,12 +26,12 @@ markers =

 [tox:tox]
 skipsdist = true
-envlist = py{36,37,38,39,310},pypy{36,37,38,39}
+envlist = py{36,37,38,39,310,311},pypy{36,37,38,39}
 skip_missing_interpreters = true

 [testenv]  # tox
 deps =
     pytest
 commands = pytest {posargs:"-m not download"}
 passenv = HOME  # For test_compat_expanduser
 setenv =
141  setup.py
@@ -1,8 +1,12 @@
 #!/usr/bin/env python3

-import os.path
-import subprocess
+# Allow execution from anywhere
+import os
 import sys
+
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+
+import subprocess
 import warnings

 try:
@@ -28,45 +32,42 @@

 def packages():
     if setuptools_available:
-        return find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins'))
+        return find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'))

     return [
         'yt_dlp', 'yt_dlp.extractor', 'yt_dlp.downloader', 'yt_dlp.postprocessor', 'yt_dlp.compat',
-        'yt_dlp.extractor.anvato_token_generator',
     ]


 def py2exe_params():
-    import py2exe  # noqa: F401
-
     warnings.warn(
         'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
-        'The recommended way is to use "pyinst.py" to build using pyinstaller')
+        'It is recommended to run "pyinst.py" to build using pyinstaller instead')

     return {
         'console': [{
             'script': './yt_dlp/__main__.py',
             'dest_base': 'yt-dlp',
+            'icon_resources': [(1, 'devscripts/logo.ico')],
+        }],
+        'version_info': {
             'version': VERSION,
             'description': DESCRIPTION,
             'comments': LONG_DESCRIPTION.split('\n')[0],
             'product_name': 'yt-dlp',
             'product_version': VERSION,
-            'icon_resources': [(1, 'devscripts/logo.ico')],
-        }],
-        'options': {
-            'py2exe': {
-                'bundle_files': 0,
-                'compressed': 1,
-                'optimize': 2,
-                'dist_dir': './dist',
-                'excludes': ['Crypto', 'Cryptodome'],  # py2exe cannot import Crypto
-                'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
-                # Modules that are only imported dynamically must be added here
-                'includes': ['yt_dlp.compat._legacy'],
-            }
         },
-        'zipfile': None
+        'options': {
+            'bundle_files': 0,
+            'compressed': 1,
+            'optimize': 2,
+            'dist_dir': './dist',
+            'excludes': ['Crypto', 'Cryptodome'],  # py2exe cannot import Crypto
+            'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
+            # Modules that are only imported dynamically must be added here
+            'includes': ['yt_dlp.compat._legacy'],
+        },
+        'zipfile': None,
     }
@@ -91,7 +92,10 @@ def build_params():
     params = {'data_files': data_files}

     if setuptools_available:
-        params['entry_points'] = {'console_scripts': ['yt-dlp = yt_dlp:main']}
+        params['entry_points'] = {
+            'console_scripts': ['yt-dlp = yt_dlp:main'],
+            'pyinstaller40': ['hook-dirs = yt_dlp.__pyinstaller:get_hook_dirs'],
+        }
     else:
         params['scripts'] = ['yt-dlp']
     return params
@@ -114,41 +118,58 @@ def run(self):
         subprocess.run([sys.executable, 'devscripts/make_lazy_extractors.py'])


-params = py2exe_params() if sys.argv[1:2] == ['py2exe'] else build_params()
-setup(
-    name='yt-dlp',
-    version=VERSION,
-    maintainer='pukkandan',
-    maintainer_email='pukkandan.ytdlp@gmail.com',
-    description=DESCRIPTION,
-    long_description=LONG_DESCRIPTION,
-    long_description_content_type='text/markdown',
-    url='https://github.com/yt-dlp/yt-dlp',
-    packages=packages(),
-    install_requires=REQUIREMENTS,
-    python_requires='>=3.7',
-    project_urls={
-        'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
-        'Source': 'https://github.com/yt-dlp/yt-dlp',
-        'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
-        'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
-    },
-    classifiers=[
-        'Topic :: Multimedia :: Video',
-        'Development Status :: 5 - Production/Stable',
-        'Environment :: Console',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-        'Programming Language :: Python :: Implementation',
-        'Programming Language :: Python :: Implementation :: CPython',
-        'Programming Language :: Python :: Implementation :: PyPy',
-        'License :: Public Domain',
-        'Operating System :: OS Independent',
-    ],
-    cmdclass={'build_lazy_extractors': build_lazy_extractors},
-    **params
-)
+def main():
+    if sys.argv[1:2] == ['py2exe']:
+        params = py2exe_params()
+        try:
+            from py2exe import freeze
+        except ImportError:
+            import py2exe  # noqa: F401
+            warnings.warn('You are using an outdated version of py2exe. Support for this version will be removed in the future')
+            params['console'][0].update(params.pop('version_info'))
+            params['options'] = {'py2exe': params.pop('options')}
+        else:
+            return freeze(**params)
+    else:
+        params = build_params()
+
+    setup(
+        name='yt-dlp',
+        version=VERSION,
+        maintainer='pukkandan',
+        maintainer_email='pukkandan.ytdlp@gmail.com',
+        description=DESCRIPTION,
+        long_description=LONG_DESCRIPTION,
+        long_description_content_type='text/markdown',
+        url='https://github.com/yt-dlp/yt-dlp',
+        packages=packages(),
+        install_requires=REQUIREMENTS,
+        python_requires='>=3.7',
+        project_urls={
+            'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
+            'Source': 'https://github.com/yt-dlp/yt-dlp',
+            'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
+            'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
+        },
+        classifiers=[
+            'Topic :: Multimedia :: Video',
+            'Development Status :: 5 - Production/Stable',
+            'Environment :: Console',
+            'Programming Language :: Python',
+            'Programming Language :: Python :: 3.7',
+            'Programming Language :: Python :: 3.8',
+            'Programming Language :: Python :: 3.9',
+            'Programming Language :: Python :: 3.10',
+            'Programming Language :: Python :: 3.11',
+            'Programming Language :: Python :: Implementation',
+            'Programming Language :: Python :: Implementation :: CPython',
+            'Programming Language :: Python :: Implementation :: PyPy',
+            'License :: Public Domain',
+            'Operating System :: OS Independent',
+        ],
+        cmdclass={'build_lazy_extractors': build_lazy_extractors},
+        **params
+    )


main()
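For context, a hedged sketch of the two py2exe entry points the rewritten setup.py handles; the parameter shapes follow the diff above rather than py2exe documentation, and the version cutoff for `freeze` is an assumption:

import warnings

def build_with_py2exe(params):
    # Modern py2exe exposes freeze(); legacy versions expect the options
    # nested under {'py2exe': ...} and version_info merged into the console
    # target -- exactly the fallback performed in setup.py above.
    try:
        from py2exe import freeze
    except ImportError:
        import py2exe  # noqa: F401
        warnings.warn('outdated py2exe; falling back to distutils-style build')
        params['console'][0].update(params.pop('version_info'))
        params['options'] = {'py2exe': params.pop('options')}
        return None  # caller should invoke setup(**params) instead
    return freeze(**params)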
(File diff suppressed because it is too large)
@@ -222,6 +222,10 @@ def sanitize(key, value):
     if test_info_dict.get('display_id') == test_info_dict.get('id'):
         test_info_dict.pop('display_id')

+    # Check url for flat entries
+    if got_dict.get('_type', 'video') != 'video' and got_dict.get('url'):
+        test_info_dict['url'] = got_dict['url']
+
     return test_info_dict


@@ -235,8 +239,9 @@ def expect_info_dict(self, got_dict, expected_dict):
     for key in mandatory_fields:
         self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
     # Check for mandatory fields that are automatically set by YoutubeDL
-    for key in ['webpage_url', 'extractor', 'extractor_key']:
-        self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
+    if got_dict.get('_type', 'video') == 'video':
+        for key in ['webpage_url', 'extractor', 'extractor_key']:
+            self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)

     test_info_dict = sanitize_got_info_dict(got_dict)

@@ -249,19 +254,16 @@ def _repr(v):
             return v.__name__
         else:
             return repr(v)
-    info_dict_str = ''
-    if len(missing_keys) != len(expected_dict):
-        info_dict_str += ''.join(
-            f' {_repr(k)}: {_repr(v)},\n'
-            for k, v in test_info_dict.items() if k not in missing_keys)
-
-        if info_dict_str:
-            info_dict_str += '\n'
+    info_dict_str = ''.join(
+        f' {_repr(k)}: {_repr(v)},\n'
+        for k, v in test_info_dict.items() if k not in missing_keys)
+    if info_dict_str:
+        info_dict_str += '\n'
     info_dict_str += ''.join(
         f' {_repr(k)}: {_repr(test_info_dict[k])},\n'
         for k in missing_keys)
-    write_string(
-        '\n\'info_dict\': {\n' + info_dict_str + '},\n', out=sys.stderr)
+    info_dict_str = '\n\'info_dict\': {\n' + info_dict_str + '},\n'
+    write_string(info_dict_str.replace('\n', '\n '), out=sys.stderr)
     self.assertFalse(
         missing_keys,
         'Missing keys in test definition: %s' % (
@@ -44,5 +44,6 @@
     "writesubtitles": false,
     "allsubtitles": false,
     "listsubtitles": false,
-    "fixup": "never"
+    "fixup": "never",
+    "allow_playlist_files": false
 }
@@ -41,7 +41,9 @@ def do_GET(self):
|
|||||||
|
|
||||||
|
|
||||||
class DummyIE(InfoExtractor):
|
class DummyIE(InfoExtractor):
|
||||||
pass
|
def _sort_formats(self, formats, field_preference=[]):
|
||||||
|
self._downloader.sort_formats(
|
||||||
|
{'formats': formats, '_format_sort_fields': field_preference})
|
||||||
|
|
||||||
|
|
||||||
class TestInfoExtractor(unittest.TestCase):
|
class TestInfoExtractor(unittest.TestCase):
|
||||||
@@ -67,6 +69,7 @@ def test_opengraph(self):
|
|||||||
<meta name="og:test1" content='foo > < bar'/>
|
<meta name="og:test1" content='foo > < bar'/>
|
||||||
<meta name="og:test2" content="foo >//< bar"/>
|
<meta name="og:test2" content="foo >//< bar"/>
|
||||||
<meta property=og-test3 content='Ill-formatted opengraph'/>
|
<meta property=og-test3 content='Ill-formatted opengraph'/>
|
||||||
|
<meta property=og:test4 content=unquoted-value/>
|
||||||
'''
|
'''
|
||||||
self.assertEqual(ie._og_search_title(html), 'Foo')
|
self.assertEqual(ie._og_search_title(html), 'Foo')
|
||||||
self.assertEqual(ie._og_search_description(html), 'Some video\'s description ')
|
self.assertEqual(ie._og_search_description(html), 'Some video\'s description ')
|
||||||
@@ -79,6 +82,7 @@ def test_opengraph(self):
         self.assertEqual(ie._og_search_property(('test0', 'test1'), html), 'foo > < bar')
         self.assertRaises(RegexNotFoundError, ie._og_search_property, 'test0', html, None, fatal=True)
         self.assertRaises(RegexNotFoundError, ie._og_search_property, ('test0', 'test00'), html, None, fatal=True)
+        self.assertEqual(ie._og_search_property('test4', html), 'unquoted-value')
 
     def test_html_search_meta(self):
         ie = self.ie
@@ -1567,6 +1571,292 @@ def test_parse_ism_formats(self):
                     ]
                 },
             ),
+            (
+                'ec-3_test',
+                'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                [{
+                    'format_id': 'audio_deu_1-224',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'isma',
+                    'tbr': 224,
+                    'asr': 48000,
+                    'vcodec': 'none',
+                    'acodec': 'EC-3',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'audio',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 0,
+                        'height': 0,
+                        'fourcc': 'EC-3',
+                        'language': 'deu',
+                        'codec_private_data': '00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00',
+                        'sampling_rate': 48000,
+                        'channels': 6,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'audio_ext': 'isma',
+                    'video_ext': 'none',
+                    'abr': 224,
+                }, {
+                    'format_id': 'audio_deu-127',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'isma',
+                    'tbr': 127,
+                    'asr': 48000,
+                    'vcodec': 'none',
+                    'acodec': 'AACL',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'audio',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 0,
+                        'height': 0,
+                        'fourcc': 'AACL',
+                        'language': 'deu',
+                        'codec_private_data': '1190',
+                        'sampling_rate': 48000,
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'audio_ext': 'isma',
+                    'video_ext': 'none',
+                    'abr': 127,
+                }, {
+                    'format_id': 'video_deu-23',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 384,
+                    'height': 216,
+                    'tbr': 23,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 384,
+                        'height': 216,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 23,
+                }, {
+                    'format_id': 'video_deu-403',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 400,
+                    'height': 224,
+                    'tbr': 403,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 400,
+                        'height': 224,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 403,
+                }, {
+                    'format_id': 'video_deu-680',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 640,
+                    'height': 360,
+                    'tbr': 680,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 640,
+                        'height': 360,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 680,
+                }, {
+                    'format_id': 'video_deu-1253',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 640,
+                    'height': 360,
+                    'tbr': 1253,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 640,
+                        'height': 360,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 1253,
+                }, {
+                    'format_id': 'video_deu-2121',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 768,
+                    'height': 432,
+                    'tbr': 2121,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 768,
+                        'height': 432,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 2121,
+                }, {
+                    'format_id': 'video_deu-3275',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 1280,
+                    'height': 720,
+                    'tbr': 3275,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 1280,
+                        'height': 720,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 3275,
+                }, {
+                    'format_id': 'video_deu-5300',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 1920,
+                    'height': 1080,
+                    'tbr': 5300,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 1920,
+                        'height': 1080,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 5300,
+                }, {
+                    'format_id': 'video_deu-8079',
+                    'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
+                    'ext': 'ismv',
+                    'width': 1920,
+                    'height': 1080,
+                    'tbr': 8079,
+                    'vcodec': 'AVC1',
+                    'acodec': 'none',
+                    'protocol': 'ism',
+                    '_download_params':
+                    {
+                        'stream_type': 'video',
+                        'duration': 370000000,
+                        'timescale': 10000000,
+                        'width': 1920,
+                        'height': 1080,
+                        'fourcc': 'AVC1',
+                        'language': 'deu',
+                        'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
+                        'channels': 2,
+                        'bits_per_sample': 16,
+                        'nal_unit_length_field': 4
+                    },
+                    'video_ext': 'ismv',
+                    'audio_ext': 'none',
+                    'vbr': 8079,
+                }],
+                {},
+            ),
         ]
 
         for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
@@ -68,8 +68,7 @@ def test_prefer_free_formats(self):
             {'ext': 'mp4', 'height': 460, 'url': TEST_URL},
         ]
         info_dict = _make_result(formats)
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'webm')
@@ -82,8 +81,7 @@ def test_prefer_free_formats(self):
             {'ext': 'mp4', 'height': 1080, 'url': TEST_URL},
         ]
         info_dict['formats'] = formats
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'mp4')
@@ -97,8 +95,7 @@ def test_prefer_free_formats(self):
             {'ext': 'flv', 'height': 720, 'url': TEST_URL},
         ]
         info_dict['formats'] = formats
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'mp4')
@@ -110,15 +107,14 @@ def test_prefer_free_formats(self):
             {'ext': 'webm', 'height': 720, 'url': TEST_URL},
         ]
         info_dict['formats'] = formats
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'webm')
 
     def test_format_selection(self):
         formats = [
-            {'format_id': '35', 'ext': 'mp4', 'preference': 1, 'url': TEST_URL},
+            {'format_id': '35', 'ext': 'mp4', 'preference': 0, 'url': TEST_URL},
             {'format_id': 'example-with-dashes', 'ext': 'webm', 'preference': 1, 'url': TEST_URL},
             {'format_id': '45', 'ext': 'webm', 'preference': 2, 'url': TEST_URL},
             {'format_id': '47', 'ext': 'webm', 'preference': 3, 'url': TEST_URL},
@@ -186,22 +182,19 @@ def test_format_selection_audio_exts(self):
 
         info_dict = _make_result(formats)
         ydl = YDL({'format': 'best'})
-        ie = YoutubeIE(ydl)
-        ie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(copy.deepcopy(info_dict))
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], 'aac-64')
 
         ydl = YDL({'format': 'mp3'})
-        ie = YoutubeIE(ydl)
-        ie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(copy.deepcopy(info_dict))
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], 'mp3-64')
 
         ydl = YDL({'prefer_free_formats': True})
-        ie = YoutubeIE(ydl)
-        ie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(copy.deepcopy(info_dict))
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], 'ogg-64')
@@ -346,8 +339,7 @@ def format_info(f_id):
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': 'bestvideo+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], '248+172')
@@ -355,40 +347,35 @@ def format_info(f_id):
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], '38')
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': 'bestvideo/best,bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['137', '141'])
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['137+141', '248+141'])
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['136+141', '247+141'])
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['248+141'])
@@ -396,16 +383,14 @@ def format_info(f_id):
         for f1, f2 in zip(formats_order, formats_order[1:]):
             info_dict = _make_result([f1, f2], extractor='youtube')
             ydl = YDL({'format': 'best/bestvideo'})
-            yie = YoutubeIE(ydl)
-            yie._sort_formats(info_dict['formats'])
+            ydl.sort_formats(info_dict)
             ydl.process_ie_result(info_dict)
             downloaded = ydl.downloaded_info_dicts[0]
             self.assertEqual(downloaded['format_id'], f1['format_id'])
 
             info_dict = _make_result([f2, f1], extractor='youtube')
             ydl = YDL({'format': 'best/bestvideo'})
-            yie = YoutubeIE(ydl)
-            yie._sort_formats(info_dict['formats'])
+            ydl.sort_formats(info_dict)
             ydl.process_ie_result(info_dict)
             downloaded = ydl.downloaded_info_dicts[0]
             self.assertEqual(downloaded['format_id'], f1['format_id'])
@@ -480,7 +465,7 @@ def test_format_filtering(self):
         for f in formats:
            f['url'] = 'http://_/'
            f['ext'] = 'unknown'
-        info_dict = _make_result(formats)
+        info_dict = _make_result(formats, _format_sort_fields=('id', ))
 
         ydl = YDL({'format': 'best[filesize<3000]'})
         ydl.process_ie_result(info_dict)
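For readers of these tests: `_make_result` is a small helper in this test file that wraps a formats list into a minimal info_dict, and the new `_format_sort_fields=('id', )` keyword pins the sort order so the filter assertions stay deterministic. A plausible shape for that helper (illustrative sketch only, not quoted from this diff):

    def _make_result(formats, **kwargs):
        # Hypothetical sketch of the test helper in test_YoutubeDL.py:
        # build the smallest info_dict that process_ie_result() will accept.
        res = {
            'formats': formats,
            'id': 'testid',
            'title': 'testtitle',
            'extractor': 'testex',
            'extractor_key': 'TestEx',
            'webpage_url': 'http://example.com/watch',
        }
        res.update(**kwargs)  # e.g. extractor='youtube' or _format_sort_fields=...
        return res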
@@ -662,13 +647,17 @@ def test_add_extra_info(self):
         'playlist_autonumber': 2,
         '__last_playlist_index': 100,
         'n_entries': 10,
-        'formats': [{'id': 'id 1'}, {'id': 'id 2'}, {'id': 'id 3'}]
+        'formats': [
+            {'id': 'id 1', 'height': 1080, 'width': 1920},
+            {'id': 'id 2', 'height': 720},
+            {'id': 'id 3'}
+        ]
     }
 
     def test_prepare_outtmpl_and_filename(self):
         def test(tmpl, expected, *, info=None, **params):
             params['outtmpl'] = tmpl
-            ydl = YoutubeDL(params)
+            ydl = FakeYDL(params)
             ydl._num_downloads = 1
             self.assertEqual(ydl.validate_outtmpl(tmpl), None)
@@ -729,6 +718,7 @@ def test(tmpl, expected, *, info=None, **params):
         self.assertTrue(isinstance(YoutubeDL.validate_outtmpl('%(title)'), ValueError))
         test('%(invalid@tmpl|def)s', 'none', outtmpl_na_placeholder='none')
         test('%(..)s', 'NA')
+        test('%(formats.{id)s', 'NA')
 
         # Entire info_dict
         def expect_same_infodict(out):
@@ -813,6 +803,12 @@ def expect_same_infodict(out):
         test('%(formats.:2:-1)r', repr(FORMATS[:2:-1]))
         test('%(formats.0.id.-1+id)f', '1235.000000')
         test('%(formats.0.id.-1+formats.1.id.-1)d', '3')
+        out = json.dumps([{'id': f['id'], 'height.:2': str(f['height'])[:2]}
+                          if 'height' in f else {'id': f['id']}
+                          for f in FORMATS])
+        test('%(formats.:.{id,height.:2})j', (out, sanitize(out)))
+        test('%(formats.:.{id,height}.id)l', ', '.join(f['id'] for f in FORMATS))
+        test('%(.{id,title})j', ('{"id": "1234"}', '{"id": "1234"}'))
 
         # Alternates
         test('%(title,id)s', '1234')
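The `{...}` selections tested above are usable from the command line as well: `%(formats.:.{id,height})j`, for instance, slices every format (`.:.`), keeps only the named keys from each dict, and serializes the result as JSON. A hedged CLI illustration (URL is a placeholder):

    yt-dlp --print '%(formats.:.{id,height})j' URL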
@@ -11,7 +11,6 @@
 import base64
 
 from yt_dlp.aes import (
-    BLOCK_SIZE_BYTES,
     aes_cbc_decrypt,
     aes_cbc_decrypt_bytes,
     aes_cbc_encrypt,
@@ -27,7 +26,7 @@
     key_expansion,
     pad_block,
 )
-from yt_dlp.dependencies import Cryptodome_AES
+from yt_dlp.dependencies import Cryptodome
 from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
 
 # the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'
@@ -49,7 +48,7 @@ def test_cbc_decrypt(self):
         data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
         decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
-        if Cryptodome_AES:
+        if Cryptodome.AES:
             decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
             self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
@@ -79,7 +78,7 @@ def test_gcm_decrypt(self):
         decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
             bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
-        if Cryptodome_AES:
+        if Cryptodome.AES:
             decrypted = aes_gcm_decrypt_and_verify_bytes(
                 data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
             self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
@@ -103,8 +102,7 @@ def test_decrypt_text(self):
 
     def test_ecb_encrypt(self):
         data = bytes_to_intlist(self.secret_msg)
-        data += [0x08] * (BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES)
-        encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key, self.iv))
+        encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
         self.assertEqual(
             encrypted,
             b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
@@ -10,6 +10,7 @@
 
 from test.helper import is_download_test, try_rm
 from yt_dlp import YoutubeDL
+from yt_dlp.utils import DownloadError
 
 
 def _download_restricted(url, filename, age):
@@ -25,10 +26,14 @@ def _download_restricted(url, filename, age):
     ydl.add_default_info_extractors()
     json_filename = os.path.splitext(filename)[0] + '.info.json'
     try_rm(json_filename)
-    ydl.download([url])
-    res = os.path.exists(json_filename)
-    try_rm(json_filename)
-    return res
+    try:
+        ydl.download([url])
+    except DownloadError:
+        pass
+    else:
+        return os.path.exists(json_filename)
+    finally:
+        try_rm(json_filename)
 
 
 @is_download_test
@@ -38,12 +43,12 @@ def _assert_restricted(self, url, filename, age, old_age=None):
         self.assertFalse(_download_restricted(url, filename, age))
 
     def test_youtube(self):
-        self._assert_restricted('07FYdnEawAQ', '07FYdnEawAQ.mp4', 10)
+        self._assert_restricted('HtVdAasjOgU', 'HtVdAasjOgU.mp4', 10)
 
     def test_youporn(self):
         self._assert_restricted(
-            'http://www.youporn.com/watch/505835/sex-ed-is-it-safe-to-masturbate-daily/',
-            '505835.mp4', 2, old_age=25)
+            'https://www.youporn.com/watch/16715086/sex-ed-in-detention-18-asmr/',
+            '16715086.mp4', 2, old_age=25)
 
 
 if __name__ == '__main__':
@@ -31,6 +31,9 @@ def test_compat_passthrough(self):
         # TODO: Test submodule
         # compat.asyncio.events  # Must not raise error
 
+        with self.assertWarns(DeprecationWarning):
+            compat.compat_pycrypto_AES  # Must not raise error
+
     def test_compat_expanduser(self):
         old_home = os.environ.get('HOME')
         test_str = R'C:\Documents and Settings\тест\Application Data'
test/test_config.py (new file, 227 lines)
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+import unittest.mock
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import contextlib
+import itertools
+from pathlib import Path
+
+from yt_dlp.compat import compat_expanduser
+from yt_dlp.options import create_parser, parseOpts
+from yt_dlp.utils import Config, get_executable_path
+
+ENVIRON_DEFAULTS = {
+    'HOME': None,
+    'XDG_CONFIG_HOME': '/_xdg_config_home/',
+    'USERPROFILE': 'C:/Users/testing/',
+    'APPDATA': 'C:/Users/testing/AppData/Roaming/',
+    'HOMEDRIVE': 'C:/',
+    'HOMEPATH': 'Users/testing/',
+}
+
+
+@contextlib.contextmanager
+def set_environ(**kwargs):
+    saved_environ = os.environ.copy()
+
+    for name, value in {**ENVIRON_DEFAULTS, **kwargs}.items():
+        if value is None:
+            os.environ.pop(name, None)
+        else:
+            os.environ[name] = value
+
+    yield
+
+    os.environ.clear()
+    os.environ.update(saved_environ)
+
+
+def _generate_expected_groups():
+    xdg_config_home = os.getenv('XDG_CONFIG_HOME') or compat_expanduser('~/.config')
+    appdata_dir = os.getenv('appdata')
+    home_dir = compat_expanduser('~')
+    return {
+        'Portable': [
+            Path(get_executable_path(), 'yt-dlp.conf'),
+        ],
+        'Home': [
+            Path('yt-dlp.conf'),
+        ],
+        'User': [
+            Path(xdg_config_home, 'yt-dlp.conf'),
+            Path(xdg_config_home, 'yt-dlp', 'config'),
+            Path(xdg_config_home, 'yt-dlp', 'config.txt'),
+            *((
+                Path(appdata_dir, 'yt-dlp.conf'),
+                Path(appdata_dir, 'yt-dlp', 'config'),
+                Path(appdata_dir, 'yt-dlp', 'config.txt'),
+            ) if appdata_dir else ()),
+            Path(home_dir, 'yt-dlp.conf'),
+            Path(home_dir, 'yt-dlp.conf.txt'),
+            Path(home_dir, '.yt-dlp', 'config'),
+            Path(home_dir, '.yt-dlp', 'config.txt'),
+        ],
+        'System': [
+            Path('/etc/yt-dlp.conf'),
+            Path('/etc/yt-dlp/config'),
+            Path('/etc/yt-dlp/config.txt'),
+        ]
+    }
+
+
+class TestConfig(unittest.TestCase):
+    maxDiff = None
+
+    @set_environ()
+    def test_config__ENVIRON_DEFAULTS_sanity(self):
+        expected = make_expected()
+        self.assertCountEqual(
+            set(expected), expected,
+            'ENVIRON_DEFAULTS produces non unique names')
+
+    def test_config_all_environ_values(self):
+        for name, value in ENVIRON_DEFAULTS.items():
+            for new_value in (None, '', '.', value or '/some/dir'):
+                with set_environ(**{name: new_value}):
+                    self._simple_grouping_test()
+
+    def test_config_default_expected_locations(self):
+        files, _ = self._simple_config_test()
+        self.assertEqual(
+            files, make_expected(),
+            'Not all expected locations have been checked')
+
+    def test_config_default_grouping(self):
+        self._simple_grouping_test()
+
+    def _simple_grouping_test(self):
+        expected_groups = make_expected_groups()
+        for name, group in expected_groups.items():
+            for index, existing_path in enumerate(group):
+                result, opts = self._simple_config_test(existing_path)
+                expected = expected_from_expected_groups(expected_groups, existing_path)
+                self.assertEqual(
+                    result, expected,
+                    f'The checked locations do not match the expected ({name}, {index})')
+                self.assertEqual(
+                    opts.outtmpl['default'], '1',
+                    f'The used result value was incorrect ({name}, {index})')
+
+    def _simple_config_test(self, *stop_paths):
+        encountered = 0
+        paths = []
+
+        def read_file(filename, default=[]):
+            nonlocal encountered
+            path = Path(filename)
+            paths.append(path)
+            if path in stop_paths:
+                encountered += 1
+                return ['-o', f'{encountered}']
+
+        with ConfigMock(read_file):
+            _, opts, _ = parseOpts([], False)
+
+        return paths, opts
+
+    @set_environ()
+    def test_config_early_exit_commandline(self):
+        self._early_exit_test(0, '--ignore-config')
+
+    @set_environ()
+    def test_config_early_exit_files(self):
+        for index, _ in enumerate(make_expected(), 1):
+            self._early_exit_test(index)
+
+    def _early_exit_test(self, allowed_reads, *args):
+        reads = 0
+
+        def read_file(filename, default=[]):
+            nonlocal reads
+            reads += 1
+
+            if reads > allowed_reads:
+                self.fail('The remaining config was not ignored')
+            elif reads == allowed_reads:
+                return ['--ignore-config']
+
+        with ConfigMock(read_file):
+            parseOpts(args, False)
+
+    @set_environ()
+    def test_config_override_commandline(self):
+        self._override_test(0, '-o', 'pass')
+
+    @set_environ()
+    def test_config_override_files(self):
+        for index, _ in enumerate(make_expected(), 1):
+            self._override_test(index)
+
+    def _override_test(self, start_index, *args):
+        index = 0
+
+        def read_file(filename, default=[]):
+            nonlocal index
+            index += 1
+
+            if index > start_index:
+                return ['-o', 'fail']
+            elif index == start_index:
+                return ['-o', 'pass']
+
+        with ConfigMock(read_file):
+            _, opts, _ = parseOpts(args, False)
+
+        self.assertEqual(
+            opts.outtmpl['default'], 'pass',
+            'The earlier group did not override the later ones')
+
+
+@contextlib.contextmanager
+def ConfigMock(read_file=None):
+    with unittest.mock.patch('yt_dlp.options.Config') as mock:
+        mock.return_value = Config(create_parser())
+        if read_file is not None:
+            mock.read_file = read_file
+
+        yield mock
+
+
+def make_expected(*filepaths):
+    return expected_from_expected_groups(_generate_expected_groups(), *filepaths)
+
+
+def make_expected_groups(*filepaths):
+    return _filter_expected_groups(_generate_expected_groups(), filepaths)
+
+
+def expected_from_expected_groups(expected_groups, *filepaths):
+    return list(itertools.chain.from_iterable(
+        _filter_expected_groups(expected_groups, filepaths).values()))
+
+
+def _filter_expected_groups(expected, filepaths):
+    if not filepaths:
+        return expected
+
+    result = {}
+    for group, paths in expected.items():
+        new_paths = []
+        for path in paths:
+            new_paths.append(path)
+            if path in filepaths:
+                break
+
+        result[group] = new_paths
+
+    return result
+
+
+if __name__ == '__main__':
+    unittest.main()
@@ -3,6 +3,7 @@
 
 from yt_dlp import cookies
 from yt_dlp.cookies import (
+    LenientSimpleCookie,
     LinuxChromeCookieDecryptor,
     MacChromeCookieDecryptor,
     WindowsChromeCookieDecryptor,
@@ -137,3 +138,163 @@ def test_safari_cookie_parsing(self):
     def test_pbkdf2_sha1(self):
         key = pbkdf2_sha1(b'peanuts', b' ' * 16, 1, 16)
         self.assertEqual(key, b'g\xe1\x8e\x0fQ\x1c\x9b\xf3\xc9`!\xaa\x90\xd9\xd34')
+
+
+class TestLenientSimpleCookie(unittest.TestCase):
+    def _run_tests(self, *cases):
+        for message, raw_cookie, expected in cases:
+            cookie = LenientSimpleCookie(raw_cookie)
+
+            with self.subTest(message, expected=expected):
+                self.assertEqual(cookie.keys(), expected.keys(), message)
+
+                for key, expected_value in expected.items():
+                    morsel = cookie[key]
+                    if isinstance(expected_value, tuple):
+                        expected_value, expected_attributes = expected_value
+                    else:
+                        expected_attributes = {}
+
+                    attributes = {
+                        key: value
+                        for key, value in dict(morsel).items()
+                        if value != ""
+                    }
+                    self.assertEqual(attributes, expected_attributes, message)
+
+                    self.assertEqual(morsel.value, expected_value, message)
+
+    def test_parsing(self):
+        self._run_tests(
+            # Copied from https://github.com/python/cpython/blob/v3.10.7/Lib/test/test_http_cookies.py
+            (
+                "Test basic cookie",
+                "chips=ahoy; vienna=finger",
+                {"chips": "ahoy", "vienna": "finger"},
+            ),
+            (
+                "Test quoted cookie",
+                'keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"',
+                {"keebler": 'E=mc2; L="Loves"; fudge=\012;'},
+            ),
+            (
+                "Allow '=' in an unquoted value",
+                "keebler=E=mc2",
+                {"keebler": "E=mc2"},
+            ),
+            (
+                "Allow cookies with ':' in their name",
+                "key:term=value:term",
+                {"key:term": "value:term"},
+            ),
+            (
+                "Allow '[' and ']' in cookie values",
+                "a=b; c=[; d=r; f=h",
+                {"a": "b", "c": "[", "d": "r", "f": "h"},
+            ),
+            (
+                "Test basic cookie attributes",
+                'Customer="WILE_E_COYOTE"; Version=1; Path=/acme',
+                {"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})},
+            ),
+            (
+                "Test flag only cookie attributes",
+                'Customer="WILE_E_COYOTE"; HttpOnly; Secure',
+                {"Customer": ("WILE_E_COYOTE", {"httponly": True, "secure": True})},
+            ),
+            (
+                "Test flag only attribute with values",
+                "eggs=scrambled; httponly=foo; secure=bar; Path=/bacon",
+                {"eggs": ("scrambled", {"httponly": "foo", "secure": "bar", "path": "/bacon"})},
+            ),
+            (
+                "Test special case for 'expires' attribute, 4 digit year",
+                'Customer="W"; expires=Wed, 01 Jan 2010 00:00:00 GMT',
+                {"Customer": ("W", {"expires": "Wed, 01 Jan 2010 00:00:00 GMT"})},
+            ),
+            (
+                "Test special case for 'expires' attribute, 2 digit year",
+                'Customer="W"; expires=Wed, 01 Jan 98 00:00:00 GMT',
+                {"Customer": ("W", {"expires": "Wed, 01 Jan 98 00:00:00 GMT"})},
+            ),
+            (
+                "Test extra spaces in keys and values",
+                "eggs  =  scrambled  ;  secure  ;  path  =  bar   ; foo=foo   ",
+                {"eggs": ("scrambled", {"secure": True, "path": "bar"}), "foo": "foo"},
+            ),
+            (
+                "Test quoted attributes",
+                'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"',
+                {"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})}
+            ),
+            # Our own tests that CPython passes
+            (
+                "Allow ';' in quoted value",
+                'chips="a;hoy"; vienna=finger',
+                {"chips": "a;hoy", "vienna": "finger"},
+            ),
+            (
+                "Keep only the last set value",
+                "a=c; a=b",
+                {"a": "b"},
+            ),
+        )
+
+    def test_lenient_parsing(self):
+        self._run_tests(
+            (
+                "Ignore and try to skip invalid cookies",
+                'chips={"ahoy;": 1}; vienna="finger;"',
+                {"vienna": "finger;"},
+            ),
+            (
+                "Ignore cookies without a name",
+                "a=b; unnamed; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Ignore '\"' cookie without name",
+                'a=b; "; c=d',
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Skip all space separated values",
+                "x a=b c=d x; e=f",
+                {"a": "b", "c": "d", "e": "f"},
+            ),
+            (
+                "Skip all space separated values",
+                'x a=b; data={"complex": "json", "with": "key=value"}; x c=d x',
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Expect quote mending",
+                'a=b; invalid="; c=d',
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Reset morsel after invalid to not capture attributes",
+                "a=b; invalid; Version=1; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Reset morsel after invalid to not capture attributes",
+                "a=b; $invalid; $Version=1; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Continue after non-flag attribute without value",
+                "a=b; path; Version=1; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Allow cookie attributes with `$` prefix",
+                'Customer="WILE_E_COYOTE"; $Version=1; $Secure; $Path=/acme',
+                {"Customer": ("WILE_E_COYOTE", {"version": "1", "secure": True, "path": "/acme"})},
+            ),
+            (
+                "Invalid Morsel keys should not result in an error",
+                "Key=Value; [Invalid]=Value; Another=Value",
+                {"Key": "Value", "Another": "Value"},
+            ),
+        )
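As the cases above spell out, `LenientSimpleCookie` is a drop-in variant of `http.cookies.SimpleCookie` that skips morsels it cannot parse instead of rejecting the whole header. A minimal usage sketch built from the first lenient test case:

    from yt_dlp.cookies import LenientSimpleCookie

    cookie = LenientSimpleCookie('chips={"ahoy;": 1}; vienna="finger;"')
    # The malformed 'chips' morsel is dropped; the valid one survives.
    assert list(cookie) == ['vienna']
    assert cookie['vienna'].value == 'finger;'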
@@ -106,7 +106,7 @@ def print_skipping(reason):
             params = tc.get('params', {})
             if not info_dict.get('id'):
                 raise Exception(f'Test {tname} definition incorrect - "id" key is not present')
-            elif not info_dict.get('ext'):
+            elif not info_dict.get('ext') and info_dict.get('_type', 'video') == 'video':
                 if params.get('skip_download') and params.get('ignore_no_formats_error'):
                     continue
                 raise Exception(f'Test {tname} definition incorrect - "ext" key must be present to define the output file')
@@ -122,7 +122,8 @@ def print_skipping(reason):
             params['outtmpl'] = tname + '_' + params['outtmpl']
         if is_playlist and 'playlist' not in test_case:
             params.setdefault('extract_flat', 'in_playlist')
-            params.setdefault('playlistend', test_case.get('playlist_mincount'))
+            params.setdefault('playlistend', test_case.get(
+                'playlist_mincount', test_case.get('playlist_count', -2) + 1))
             params.setdefault('skip_download', True)
 
         ydl = YoutubeDL(params, auto_init=False)
@@ -212,6 +213,8 @@ def try_rm_tcs_files(tcs=None):
                     tc_res_dict = res_dict['entries'][tc_num]
                     # First, check test cases' data against extracted data alone
                     expect_info_dict(self, tc_res_dict, tc.get('info_dict', {}))
+                    if tc_res_dict.get('_type', 'video') != 'video':
+                        continue
                     # Now, check downloaded file consistency
                     tc_filename = get_tc_filename(tc)
                     if not test_case.get('params', {}).get('skip_download', False):
@@ -11,41 +11,46 @@
 import contextlib
 import subprocess
 
-from yt_dlp.utils import encodeArgument
+from yt_dlp.utils import Popen
 
 rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 
-try:
-    _DEV_NULL = subprocess.DEVNULL
-except AttributeError:
-    _DEV_NULL = open(os.devnull, 'wb')
+LAZY_EXTRACTORS = 'yt_dlp/extractor/lazy_extractors.py'
 
 
 class TestExecution(unittest.TestCase):
-    def test_import(self):
-        subprocess.check_call([sys.executable, '-c', 'import yt_dlp'], cwd=rootDir)
-
-    def test_module_exec(self):
-        subprocess.check_call([sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
+    def run_yt_dlp(self, exe=(sys.executable, 'yt_dlp/__main__.py'), opts=('--version', )):
+        stdout, stderr, returncode = Popen.run(
+            [*exe, '--ignore-config', *opts], cwd=rootDir, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        print(stderr, file=sys.stderr)
+        self.assertEqual(returncode, 0)
+        return stdout.strip(), stderr.strip()
 
     def test_main_exec(self):
-        subprocess.check_call([sys.executable, 'yt_dlp/__main__.py', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
+        self.run_yt_dlp()
+
+    def test_import(self):
+        self.run_yt_dlp(exe=(sys.executable, '-c', 'import yt_dlp'))
+
+    def test_module_exec(self):
+        self.run_yt_dlp(exe=(sys.executable, '-m', 'yt_dlp'))
 
     def test_cmdline_umlauts(self):
-        p = subprocess.Popen(
-            [sys.executable, 'yt_dlp/__main__.py', '--ignore-config', encodeArgument('ä'), '--version'],
-            cwd=rootDir, stdout=_DEV_NULL, stderr=subprocess.PIPE)
-        _, stderr = p.communicate()
+        _, stderr = self.run_yt_dlp(opts=('ä', '--version'))
         self.assertFalse(stderr)
 
     def test_lazy_extractors(self):
         try:
-            subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'], cwd=rootDir, stdout=_DEV_NULL)
-            subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=_DEV_NULL)
+            subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', LAZY_EXTRACTORS],
+                                  cwd=rootDir, stdout=subprocess.DEVNULL)
+            self.assertTrue(os.path.exists(LAZY_EXTRACTORS))
+
+            _, stderr = self.run_yt_dlp(opts=('-s', 'test:'))
+            self.assertFalse(stderr)
+
+            subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=subprocess.DEVNULL)
         finally:
             with contextlib.suppress(OSError):
-                os.remove('yt_dlp/extractor/lazy_extractors.py')
+                os.remove(LAZY_EXTRACTORS)
 
 
 if __name__ == '__main__':
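The rewritten tests above lean on yt-dlp's `Popen.run` wrapper, which (per the diff itself) returns a `(stdout, stderr, returncode)` tuple from a single call instead of the separate Popen/communicate dance. A short usage sketch, assuming yt-dlp is importable from the current environment:

    import subprocess
    import sys

    from yt_dlp.utils import Popen

    # Run yt-dlp's module entry point and capture both streams at once.
    stdout, stderr, returncode = Popen.run(
        [sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'],
        text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    assert returncode == 0, stderr
    print(stdout.strip())  # the printed version string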
@@ -7,8 +7,10 @@
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
-from yt_dlp.jsinterp import JSInterpreter
+import math
+import re
+
+from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
 
 
 class TestJSInterpreter(unittest.TestCase):
@@ -66,6 +68,12 @@ def test_operators(self):
         jsi = JSInterpreter('function f(){return 0 && 1 || 2;}')
         self.assertEqual(jsi.call_function('f'), 2)
 
+        jsi = JSInterpreter('function f(){return 0 ?? 42;}')
+        self.assertEqual(jsi.call_function('f'), 0)
+
+        jsi = JSInterpreter('function f(){return "life, the universe and everything" < 42;}')
+        self.assertFalse(jsi.call_function('f'))
+
     def test_array_access(self):
         jsi = JSInterpreter('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}')
         self.assertEqual(jsi.call_function('f'), [5, 2, 7])
@@ -124,6 +132,11 @@ def test_precedence(self):
         self.assertEqual(jsi.call_function('x'), [20, 20, 30, 40, 50])
 
     def test_builtins(self):
+        jsi = JSInterpreter('''
+        function x() { return NaN }
+        ''')
+        self.assertTrue(math.isnan(jsi.call_function('x')))
+
         jsi = JSInterpreter('''
         function x() { return new Date('Wednesday 31 December 1969 18:01:26 MDT') - 0; }
         ''')
@@ -142,6 +155,38 @@ def test_call(self):
         self.assertEqual(jsi.call_function('z'), 5)
         self.assertEqual(jsi.call_function('y'), 2)
 
+    def test_if(self):
+        jsi = JSInterpreter('''
+        function x() {
+            let a = 9;
+            if (0==0) {a++}
+            return a
+        }''')
+        self.assertEqual(jsi.call_function('x'), 10)
+
+        jsi = JSInterpreter('''
+        function x() {
+            if (0==0) {return 10}
+        }''')
+        self.assertEqual(jsi.call_function('x'), 10)
+
+        jsi = JSInterpreter('''
+        function x() {
+            if (0!=0) {return 1}
+            else {return 10}
+        }''')
+        self.assertEqual(jsi.call_function('x'), 10)
+
+        """  # Unsupported
+        jsi = JSInterpreter('''
+        function x() {
+            if (0!=0) {return 1}
+            else if (1==0) {return 2}
+            else {return 10}
+        }''')
+        self.assertEqual(jsi.call_function('x'), 10)
+        """
+
     def test_for_loop(self):
         jsi = JSInterpreter('''
         function x() { a=0; for (i=0; i-10; i++) {a++} return a }
@@ -183,6 +228,30 @@ def test_try(self):
|
|||||||
''')
|
''')
|
||||||
self.assertEqual(jsi.call_function('x'), 10)
|
self.assertEqual(jsi.call_function('x'), 10)
|
||||||
|
|
||||||
|
def test_catch(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { try{throw 10} catch(e){return 5} }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 5)
|
||||||
|
|
||||||
|
def test_finally(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { try{throw 10} finally {return 42} }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 42)
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { try{throw 10} catch(e){return 5} finally {return 42} }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 42)
|
||||||
|
|
||||||
|
def test_nested_try(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() {try {
|
||||||
|
try{throw 10} finally {throw 42}
|
||||||
|
} catch(e){return 5} }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 5)
|
||||||
|
|
||||||
def test_for_loop_continue(self):
|
def test_for_loop_continue(self):
|
||||||
jsi = JSInterpreter('''
|
jsi = JSInterpreter('''
|
||||||
function x() { a=0; for (i=0; i-10; i++) { continue; a++ } return a }
|
function x() { a=0; for (i=0; i-10; i++) { continue; a++ } return a }
|
||||||
@@ -195,6 +264,14 @@ def test_for_loop_break(self):
|
|||||||
''')
|
''')
|
||||||
self.assertEqual(jsi.call_function('x'), 0)
|
self.assertEqual(jsi.call_function('x'), 0)
|
||||||
|
|
||||||
|
def test_for_loop_try(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() {
|
||||||
|
for (i=0; i-10; i++) { try { if (i == 5) throw i} catch {return 10} finally {break} };
|
||||||
|
return 42 }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 42)
|
||||||
|
|
||||||
def test_literal_list(self):
|
def test_literal_list(self):
|
||||||
jsi = JSInterpreter('''
|
jsi = JSInterpreter('''
|
||||||
function x() { return [1, 2, "asdf", [5, 6, 7]][3] }
|
function x() { return [1, 2, "asdf", [5, 6, 7]][3] }
|
||||||
@@ -212,6 +289,11 @@ def test_comma(self):
|
|||||||
''')
|
''')
|
||||||
self.assertEqual(jsi.call_function('x'), 7)
|
self.assertEqual(jsi.call_function('x'), 7)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return (l=[0,1,2,3], function(a, b){return a+b})((l[1], l[2]), l[3]) }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 5)
|
||||||
|
|
||||||
def test_void(self):
|
def test_void(self):
|
||||||
jsi = JSInterpreter('''
|
jsi = JSInterpreter('''
|
||||||
function x() { return void 42; }
|
function x() { return void 42; }
|
||||||
@@ -224,6 +306,145 @@ def test_return_function(self):
|
|||||||
''')
|
''')
|
||||||
self.assertEqual(jsi.call_function('x')([]), 1)
|
self.assertEqual(jsi.call_function('x')([]), 1)
|
||||||
|
|
||||||
|
def test_null(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return null; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), None)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return [null > 0, null < 0, null == 0, null === 0]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [False, False, False, False])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return [null >= 0, null <= 0]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [True, True])
|
||||||
|
|
||||||
|
def test_undefined(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return undefined === undefined; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), True)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return undefined; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let v; return v; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return [undefined === undefined, undefined == undefined, undefined < undefined, undefined > undefined]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [True, True, False, False])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return [undefined === 0, undefined == 0, undefined < 0, undefined > 0]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [False, False, False, False])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return [undefined >= 0, undefined <= 0]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [False, False])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return [undefined > null, undefined < null, undefined == null, undefined === null]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [False, False, True, False])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return [undefined === null, undefined == null, undefined < null, undefined > null]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [False, True, False, False])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let v; return [42+v, v+42, v**42, 42**v, 0**v]; }
|
||||||
|
''')
|
||||||
|
for y in jsi.call_function('x'):
|
||||||
|
self.assertTrue(math.isnan(y))
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let v; return v**0; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 1)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let v; return [v>42, v<=42, v&&42, 42&&v]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [False, False, JS_Undefined, JS_Undefined])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('function x(){return undefined ?? 42; }')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 42)
|
||||||
|
|
||||||
|
def test_object(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { return {}; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), {})
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let a = {m1: 42, m2: 0 }; return [a["m1"], a.m2]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), [42, 0])
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let a; return a?.qq; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let a = {m1: 42, m2: 0 }; return a?.qq; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||||
|
|
||||||
|
def test_regex(self):
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let a=/,,[/,913,/](,)}/; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x'), None)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let a=/,,[/,913,/](,)}/; return a; }
|
||||||
|
''')
|
||||||
|
self.assertIsInstance(jsi.call_function('x'), re.Pattern)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function x() { let a=/,,[/,913,/](,)}/i; return a; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x').flags & re.I, re.I)
|
||||||
|
|
||||||
|
jsi = JSInterpreter(R'''
|
||||||
|
function x() { let a=/,][}",],()}(\[)/; return a; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x').pattern, r',][}",],()}(\[)')
|
||||||
|
|
||||||
|
jsi = JSInterpreter(R'''
|
||||||
|
function x() { let a=[/[)\\]/]; return a[0]; }
|
||||||
|
''')
|
||||||
|
self.assertEqual(jsi.call_function('x').pattern, r'[)\\]')
|
||||||
|
|
||||||
|
def test_char_code_at(self):
|
||||||
|
jsi = JSInterpreter('function x(i){return "test".charCodeAt(i)}')
|
||||||
|
self.assertEqual(jsi.call_function('x', 0), 116)
|
||||||
|
self.assertEqual(jsi.call_function('x', 1), 101)
|
||||||
|
self.assertEqual(jsi.call_function('x', 2), 115)
|
||||||
|
self.assertEqual(jsi.call_function('x', 3), 116)
|
||||||
|
self.assertEqual(jsi.call_function('x', 4), None)
|
||||||
|
self.assertEqual(jsi.call_function('x', 'not_a_number'), 116)
|
||||||
|
|
||||||
|
def test_bitwise_operators_overflow(self):
|
||||||
|
jsi = JSInterpreter('function x(){return -524999584 << 5}')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 379882496)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('function x(){return 1236566549 << 5}')
|
||||||
|
self.assertEqual(jsi.call_function('x'), 915423904)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
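
As context for the hunks above: they exercise yt-dlp's pure-Python JavaScript interpreter directly. A minimal sketch of the API the tests use — all names come from this diff, and the behavior is restated from the assertions above:

    from yt_dlp.jsinterp import JS_Undefined, JSInterpreter

    # Parse a small JS function and call it from Python, as the tests do
    jsi = JSInterpreter('function f(){ return undefined ?? 42; }')
    assert jsi.call_function('f') == 42

    # An uninitialised JS variable comes back as the JS_Undefined sentinel
    jsi = JSInterpreter('function f(){ let v; return v; }')
    assert jsi.call_function('f') == JS_Undefined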
test/test_plugins.py (new file, 73 lines):

import importlib
import os
import shutil
import sys
import unittest
from pathlib import Path

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
TEST_DATA_DIR = Path(os.path.dirname(os.path.abspath(__file__)), 'testdata')
sys.path.append(str(TEST_DATA_DIR))
importlib.invalidate_caches()

from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins


class TestPlugins(unittest.TestCase):

    TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME

    def test_directories_containing_plugins(self):
        self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))

    def test_extractor_classes(self):
        for module_name in tuple(sys.modules):
            if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
                del sys.modules[module_name]
        plugins_ie = load_plugins('extractor', 'IE')

        self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
        self.assertIn('NormalPluginIE', plugins_ie.keys())

        # don't load modules with underscore prefix
        self.assertFalse(
            f'{PACKAGE_NAME}.extractor._ignore' in sys.modules.keys(),
            'loaded module beginning with underscore')
        self.assertNotIn('IgnorePluginIE', plugins_ie.keys())

        # Don't load extractors with underscore prefix
        self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())

        # Don't load extractors not specified in __all__ (if supplied)
        self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
        self.assertIn('InAllPluginIE', plugins_ie.keys())

    def test_postprocessor_classes(self):
        plugins_pp = load_plugins('postprocessor', 'PP')
        self.assertIn('NormalPluginPP', plugins_pp.keys())

    def test_importing_zipped_module(self):
        zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'
        shutil.make_archive(str(zip_path)[:-4], 'zip', str(zip_path)[:-4])
        sys.path.append(str(zip_path))  # add zip to search paths
        importlib.invalidate_caches()  # reset the import caches

        try:
            for plugin_type in ('extractor', 'postprocessor'):
                package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
                self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

            plugins_ie = load_plugins('extractor', 'IE')
            self.assertIn('ZippedPluginIE', plugins_ie.keys())

            plugins_pp = load_plugins('postprocessor', 'PP')
            self.assertIn('ZippedPluginPP', plugins_pp.keys())

        finally:
            sys.path.remove(str(zip_path))
            os.remove(zip_path)
            importlib.invalidate_caches()  # reset the import caches


if __name__ == '__main__':
    unittest.main()
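
test_plugins.py above discovers plugins from any yt_dlp_plugins package visible on sys.path (the matching testdata files appear near the end of this diff). As a hedged sketch modeled on that testdata layout — the file and class names here are hypothetical examples, not part of the diff:

    # yt_dlp_plugins/extractor/example.py  (hypothetical plugin file)
    from yt_dlp.extractor.common import InfoExtractor


    # Extractor plugin classes must end in "IE" to be picked up by
    # load_plugins('extractor', 'IE')
    class ExamplePluginIE(InfoExtractor):
        pass

Per the assertions above, modules and extractor classes whose names start with an underscore are skipped, and when a module defines __all__, only the names listed there are loaded.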
test/test_postprocessors.py:

@@ -16,6 +16,7 @@
     MetadataFromFieldPP,
     MetadataParserPP,
     ModifyChaptersPP,
+    SponsorBlockPP,
 )


@@ -76,11 +77,15 @@ def setUp(self):
         self._pp = ModifyChaptersPP(YoutubeDL())

     @staticmethod
-    def _sponsor_chapter(start, end, cat, remove=False):
-        c = {'start_time': start, 'end_time': end, '_categories': [(cat, start, end)]}
-        if remove:
-            c['remove'] = True
-        return c
+    def _sponsor_chapter(start, end, cat, remove=False, title=None):
+        if title is None:
+            title = SponsorBlockPP.CATEGORIES[cat]
+        return {
+            'start_time': start,
+            'end_time': end,
+            '_categories': [(cat, start, end, title)],
+            **({'remove': True} if remove else {}),
+        }

     @staticmethod
     def _chapter(start, end, title=None, remove=False):
@@ -130,6 +135,19 @@ def test_remove_marked_arrange_sponsors_ChapterWithSponsors(self):
             'c', '[SponsorBlock]: Filler Tangent', 'c'])
         self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

+    def test_remove_marked_arrange_sponsors_SponsorBlockChapters(self):
+        chapters = self._chapters([70], ['c']) + [
+            self._sponsor_chapter(10, 20, 'chapter', title='sb c1'),
+            self._sponsor_chapter(15, 16, 'chapter', title='sb c2'),
+            self._sponsor_chapter(30, 40, 'preview'),
+            self._sponsor_chapter(50, 60, 'filler')]
+        expected = self._chapters(
+            [10, 15, 16, 20, 30, 40, 50, 60, 70],
+            ['c', '[SponsorBlock]: sb c1', '[SponsorBlock]: sb c1, sb c2', '[SponsorBlock]: sb c1',
+             'c', '[SponsorBlock]: Preview/Recap',
+             'c', '[SponsorBlock]: Filler Tangent', 'c'])
+        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
     def test_remove_marked_arrange_sponsors_UniqueNamesForOverlappingSponsors(self):
         chapters = self._chapters([120], ['c']) + [
             self._sponsor_chapter(10, 45, 'sponsor'), self._sponsor_chapter(20, 40, 'selfpromo'),
@@ -173,7 +191,7 @@ def test_remove_marked_arrange_sponsors_ChapterWithSponsorCutInTheMiddle(self):
         self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

     def test_remove_marked_arrange_sponsors_ChapterWithCutHidingSponsor(self):
-        cuts = [self._sponsor_chapter(20, 50, 'selpromo', remove=True)]
+        cuts = [self._sponsor_chapter(20, 50, 'selfpromo', remove=True)]
         chapters = self._chapters([60], ['c']) + [
             self._sponsor_chapter(10, 20, 'intro'),
             self._sponsor_chapter(30, 40, 'sponsor'),
@@ -199,7 +217,7 @@ def test_remove_marked_arrange_sponsors_ChapterWithAdjacentCuts(self):
             self._sponsor_chapter(10, 20, 'sponsor'),
             self._sponsor_chapter(20, 30, 'interaction', remove=True),
             self._chapter(30, 40, remove=True),
-            self._sponsor_chapter(40, 50, 'selpromo', remove=True),
+            self._sponsor_chapter(40, 50, 'selfpromo', remove=True),
             self._sponsor_chapter(50, 60, 'interaction')]
         expected = self._chapters([10, 20, 30, 40],
                                   ['c', '[SponsorBlock]: Sponsor',
@@ -282,7 +300,7 @@ def test_remove_marked_arrange_sponsors_SponsorsNoLongerOverlapAfterCut(self):
         chapters = self._chapters([70], ['c']) + [
             self._sponsor_chapter(10, 30, 'sponsor'),
             self._sponsor_chapter(20, 50, 'interaction'),
-            self._sponsor_chapter(30, 50, 'selpromo', remove=True),
+            self._sponsor_chapter(30, 50, 'selfpromo', remove=True),
             self._sponsor_chapter(40, 60, 'sponsor'),
             self._sponsor_chapter(50, 60, 'interaction')]
         expected = self._chapters(
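
Note on the rewritten helper: _sponsor_chapter now carries a per-chapter title in each _categories tuple, defaulting to SponsorBlockPP.CATEGORIES[cat] when no explicit title is given. Restating the values from the new test above, a call like self._sponsor_chapter(10, 20, 'chapter', title='sb c1') now produces:

    {'start_time': 10,
     'end_time': 20,
     '_categories': [('chapter', 10, 20, 'sb c1')]}
    # and passing remove=True additionally sets 'remove': True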
test/test_utils.py:

@@ -2,6 +2,7 @@

 # Allow direct execution
 import os
+import re
 import sys
 import unittest

@@ -104,11 +105,13 @@
     sanitized_Request,
     shell_quote,
     smuggle_url,
+    str_or_none,
     str_to_int,
     strip_jsonp,
     strip_or_none,
     subtitles_filename,
     timeconvert,
+    traverse_obj,
     unescapeHTML,
     unified_strdate,
     unified_timestamp,
@@ -566,6 +569,7 @@ def test_base_url(self):
         self.assertEqual(base_url('http://foo.de/bar/'), 'http://foo.de/bar/')
         self.assertEqual(base_url('http://foo.de/bar/baz'), 'http://foo.de/bar/')
         self.assertEqual(base_url('http://foo.de/bar/baz?x=z/x/c'), 'http://foo.de/bar/')
+        self.assertEqual(base_url('http://foo.de/bar/baz&x=z&w=y/x/c'), 'http://foo.de/bar/baz&x=z&w=y/x/')

     def test_urljoin(self):
         self.assertEqual(urljoin('http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
@@ -951,6 +955,85 @@ def test_escape_url(self):
         )
         self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')

+    def test_js_to_json_vars_strings(self):
+        self.assertDictEqual(
+            json.loads(js_to_json(
+                '''{
+                    'null': a,
+                    'nullStr': b,
+                    'true': c,
+                    'trueStr': d,
+                    'false': e,
+                    'falseStr': f,
+                    'unresolvedVar': g,
+                }''',
+                {
+                    'a': 'null',
+                    'b': '"null"',
+                    'c': 'true',
+                    'd': '"true"',
+                    'e': 'false',
+                    'f': '"false"',
+                    'g': 'var',
+                }
+            )),
+            {
+                'null': None,
+                'nullStr': 'null',
+                'true': True,
+                'trueStr': 'true',
+                'false': False,
+                'falseStr': 'false',
+                'unresolvedVar': 'var'
+            }
+        )
+
+        self.assertDictEqual(
+            json.loads(js_to_json(
+                '''{
+                    'int': a,
+                    'intStr': b,
+                    'float': c,
+                    'floatStr': d,
+                }''',
+                {
+                    'a': '123',
+                    'b': '"123"',
+                    'c': '1.23',
+                    'd': '"1.23"',
+                }
+            )),
+            {
+                'int': 123,
+                'intStr': '123',
+                'float': 1.23,
+                'floatStr': '1.23',
+            }
+        )
+
+        self.assertDictEqual(
+            json.loads(js_to_json(
+                '''{
+                    'object': a,
+                    'objectStr': b,
+                    'array': c,
+                    'arrayStr': d,
+                }''',
+                {
+                    'a': '{}',
+                    'b': '"{}"',
+                    'c': '[]',
+                    'd': '"[]"',
+                }
+            )),
+            {
+                'object': {},
+                'objectStr': '{}',
+                'array': [],
+                'arrayStr': '[]',
+            }
+        )
+
     def test_js_to_json_realworld(self):
         inp = '''{
             'clip':{'provider':'pseudo'}
@@ -1097,6 +1180,12 @@ def test_js_to_json_edgecases(self):
         on = js_to_json('[1,//{},\n2]')
         self.assertEqual(json.loads(on), [1, 2])

+        on = js_to_json(R'"\^\$\#"')
+        self.assertEqual(json.loads(on), R'^$#', msg='Unnecessary escapes should be stripped')
+
+        on = js_to_json('\'"\\""\'')
+        self.assertEqual(json.loads(on), '"""', msg='Unnecessary quote escape should be escaped')
+
     def test_js_to_json_malformed(self):
         self.assertEqual(js_to_json('42a1'), '42"a1"')
         self.assertEqual(js_to_json('42a-1'), '42"a"-1')
@@ -1676,6 +1765,9 @@ def test_get_elements_text_and_html_by_attribute(self):
         self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
         self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])

+        self.assertEqual(list(get_elements_text_and_html_by_attribute(
+            'class', 'foo', '<a class="foo">nice</a><span class="foo">nice</span>', tag='a')), [('nice', '<a class="foo">nice</a>')])
+
     GET_ELEMENT_BY_TAG_TEST_STRING = '''
     random text lorem ipsum</p>
     <div>
@@ -1862,6 +1954,8 @@ def test_get_compatible_ext(self):
             vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['m4a']), 'mkv')
         self.assertEqual(get_compatible_ext(
             vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['webm']), 'webm')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['weba']), 'webm')

         self.assertEqual(get_compatible_ext(
             vcodecs=['h264'], acodecs=['mp4a'], vexts=['mov'], aexts=['m4a']), 'mp4')
@@ -1873,6 +1967,301 @@ def test_get_compatible_ext(self):
         self.assertEqual(get_compatible_ext(
             vcodecs=['av1'], acodecs=['mp4a'], vexts=['webm'], aexts=['m4a'], preferences=('webm', 'mkv')), 'mkv')

+    def test_traverse_obj(self):
+        _TEST_DATA = {
+            100: 100,
+            1.2: 1.2,
+            'str': 'str',
+            'None': None,
+            '...': ...,
+            'urls': [
+                {'index': 0, 'url': 'https://www.example.com/0'},
+                {'index': 1, 'url': 'https://www.example.com/1'},
+            ],
+            'data': (
+                {'index': 2},
+                {'index': 3},
+            ),
+            'dict': {},
+        }
+
+        # Test base functionality
+        self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
+                         msg='allow tuple path')
+        self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
+                         msg='allow list path')
+        self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
+                         msg='allow iterable path')
+        self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
+                         msg='single items should be treated as a path')
+        self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
+        self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
+        self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
+
+        # Test Ellipsis behavior
+        self.assertCountEqual(traverse_obj(_TEST_DATA, ...),
+                              (item for item in _TEST_DATA.values() if item not in (None, {})),
+                              msg='`...` should give all non discarded values')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, ...)), _TEST_DATA['urls'][0].values(),
+                              msg='`...` selection for dicts should select all values')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., ..., 'url')),
+                         ['https://www.example.com/0', 'https://www.example.com/1'],
+                         msg='nested `...` queries should work')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, (..., ..., 'index')), range(4),
+                              msg='`...` query result should be flattened')
+
+        # Test function as key
+        self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
+                         [_TEST_DATA['urls']],
+                         msg='function as query key should perform a filter based on (key, value)')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), {'str'},
+                              msg='exceptions in the query function should be catched')
+        if __debug__:
+            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
+                traverse_obj(_TEST_DATA, lambda a: ...)
+            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
+                traverse_obj(_TEST_DATA, lambda a, b, c: ...)
+
+        # Test set as key (transformation/type, like `expected_type`)
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper}, )), ['STR'],
+                         msg='Function in set should be a transformation')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str})), ['str'],
+                         msg='Type in set should be a type filter')
+        self.assertEqual(traverse_obj(_TEST_DATA, {dict}), _TEST_DATA,
+                         msg='A single set should be wrapped into a path')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper})), ['STR'],
+                         msg='Transformation function should not raise')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str_or_none})),
+                         [item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
+                         msg='Function in set should be a transformation')
+        if __debug__:
+            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
+                traverse_obj(_TEST_DATA, set())
+            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
+                traverse_obj(_TEST_DATA, {str.upper, str})
+
+        # Test alternative paths
+        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
+                         msg='multiple `paths` should be treated as alternative paths')
+        self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
+                         msg='alternatives should exit early')
+        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
+                         msg='alternatives should return `default` if exhausted')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., 'fail'), 100), 100,
+                         msg='alternatives should track their own branching return')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)), list(_TEST_DATA['data']),
+                         msg='alternatives on empty objects should search further')
+
+        # Test branch and path nesting
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
+                         msg='tuple as key should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
+                         msg='list as key should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
+                         msg='double nesting in path should be treated as paths')
+        self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
+                         msg='do not fail early on branching')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
+                              ['https://www.example.com/0', 'https://www.example.com/1'],
+                              msg='tripple nesting in path should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))),
+                         ['https://www.example.com/0', 'https://www.example.com/1'],
+                         msg='ellipsis as branch path start gets flattened')
+
+        # Test dictionary as key
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
+                         msg='dict key should result in a dict with the same keys')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
+                         {0: 'https://www.example.com/0'},
+                         msg='dict key should allow paths')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
+                         {0: ['https://www.example.com/0']},
+                         msg='tuple in dict path should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
+                         {0: ['https://www.example.com/0']},
+                         msg='double nesting in dict path should be treated as paths')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
+                         {0: ['https://www.example.com/1', 'https://www.example.com/0']},
+                         msg='tripple nesting in dict path should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
+                         msg='remove `None` values when top level dict key fails')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=...), {0: ...},
+                         msg='use `default` if key fails and `default`')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
+                         msg='remove empty values when dict key')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=...), {0: ...},
+                         msg='use `default` when dict key and `default`')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
+                         msg='remove empty values when nested dict key fails')
+        self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
+                         msg='default to dict if pruned')
+        self.assertEqual(traverse_obj(None, {0: 'fail'}, default=...), {0: ...},
+                         msg='default to dict if pruned and default is given')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=...), {0: {0: ...}},
+                         msg='use nested `default` when nested dict key fails and `default`')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', ...)}), {},
+                         msg='remove key if branch in dict key not successful')
+
+        # Testing default parameter behavior
+        _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
+                         msg='default value should be `None`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...), ...,
+                         msg='chained fails should result in default')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
+                         msg='should not short cirquit on `None`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
+                         msg='invalid dict key should result in `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
+                         msg='`None` is a deliberate sentinel and should become `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
+                         msg='`IndexError` should result in `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1), 1,
+                         msg='if branched but not successful return `default` if defined, not `[]`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None), None,
+                         msg='if branched but not successful return `default` even if `default` is `None`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail')), [],
+                         msg='if branched but not successful return `[]`, not `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', ...)), [],
+                         msg='if branched but object is empty return `[]`, not `default`')
+        self.assertEqual(traverse_obj(None, ...), [],
+                         msg='if branched but object is `None` return `[]`, not `default`')
+        self.assertEqual(traverse_obj({0: None}, (0, ...)), [],
+                         msg='if branched but state is `None` return `[]`, not `default`')
+
+        branching_paths = [
+            ('fail', ...),
+            (..., 'fail'),
+            100 * ('fail',) + (...,),
+            (...,) + 100 * ('fail',),
+        ]
+        for branching_path in branching_paths:
+            self.assertEqual(traverse_obj({}, branching_path), [],
+                             msg='if branched but state is `None`, return `[]` (not `default`)')
+            self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
+                             msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
+            self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
+                             msg='if branching in last alternative and previous did match, return single value')
+            self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
+                             msg='if branching in first alternative and non-branching path does match, return single value')
+            self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
+                             msg='if branching in first alternative and non-branching path does not match, return `default`')
+
+        # Testing expected_type behavior
+        _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
+                         'str', msg='accept matching `expected_type` type')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
+                         None, msg='reject non matching `expected_type` type')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
+                         '0', msg='transform type using type function')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
+                         None, msg='wrap expected_type fuction in try_call')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str),
+                         ['str'], msg='eliminate items that expected_type fails on')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
+                         {0: 100}, msg='type as expected_type should filter dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
+                         {0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, {int_or_none}), expected_type=int),
+                         1, msg='expected_type should not filter non final dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
+                         {0: {0: 100}}, msg='expected_type should transform deep dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(...)),
+                         [{0: ...}, {0: ...}], msg='expected_type should transform branched dict values')
+        self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
+                         [4], msg='expected_type regression for type matching in tuple branching')
+        self.assertEqual(traverse_obj(_TEST_DATA, ['data', ...], expected_type=int),
+                         [], msg='expected_type regression for type matching in dict result')
+
+        # Test get_all behavior
+        _GET_ALL_DATA = {'key': [0, 1, 2]}
+        self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False), 0,
+                         msg='if not `get_all`, return only first matching value')
+        self.assertEqual(traverse_obj(_GET_ALL_DATA, ..., get_all=False), [0, 1, 2],
+                         msg='do not overflatten if not `get_all`')
+
+        # Test casesense behavior
+        _CASESENSE_DATA = {
+            'KeY': 'value0',
+            0: {
+                'KeY': 'value1',
+                0: {'KeY': 'value2'},
+            },
+        }
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
+                         msg='dict keys should be case sensitive unless `casesense`')
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
+                                      casesense=False), 'value0',
+                         msg='allow non matching key case if `casesense`')
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
+                                      casesense=False), ['value1'],
+                         msg='allow non matching key case in branch if `casesense`')
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
+                                      casesense=False), ['value2'],
+                         msg='allow non matching key case in branch path if `casesense`')
+
+        # Test traverse_string behavior
+        _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
+                         msg='do not traverse into string if not `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
+                                      traverse_string=True), 's',
+                         msg='traverse into string if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
+                                      traverse_string=True), '.',
+                         msg='traverse into converted data if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...),
+                                      traverse_string=True), 'str',
+                         msg='`...` should result in string (same value) if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
+                                      traverse_string=True), 'sr',
+                         msg='`slice` should result in string if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == "s"),
+                                      traverse_string=True), 'str',
+                         msg='function should result in string if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
+                                      traverse_string=True), ['s', 'r'],
+                         msg='branching should result in list if `traverse_string`')
+
+        # Test is_user_input behavior
+        _IS_USER_INPUT_DATA = {'range8': list(range(8))}
+        self.assertEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3'),
+                                      is_user_input=True), 3,
+                         msg='allow for string indexing if `is_user_input`')
+        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3:'),
+                                           is_user_input=True), tuple(range(8))[3:],
+                              msg='allow for string slice if `is_user_input`')
+        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':4:2'),
+                                           is_user_input=True), tuple(range(8))[:4:2],
+                              msg='allow step in string slice if `is_user_input`')
+        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':'),
+                                           is_user_input=True), range(8),
+                              msg='`:` should be treated as `...` if `is_user_input`')
+        with self.assertRaises(TypeError, msg='too many params should result in error'):
+            traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':::'), is_user_input=True)
+
+        # Test re.Match as input obj
+        mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
+        self.assertEqual(traverse_obj(mobj, ...), [x for x in mobj.groups() if x is not None],
+                         msg='`...` on a `re.Match` should give its `groups()`')
+        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
+                         msg='function on a `re.Match` should give groupno, value starting at 0')
+        self.assertEqual(traverse_obj(mobj, 'group'), '3',
+                         msg='str key on a `re.Match` should give group with that name')
+        self.assertEqual(traverse_obj(mobj, 2), '3',
+                         msg='int key on a `re.Match` should give group with that name')
+        self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
+                         msg='str key on a `re.Match` should respect casesense')
+        self.assertEqual(traverse_obj(mobj, 'fail'), None,
+                         msg='failing str key on a `re.Match` should return `default`')
+        self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
+                         msg='failing str key on a `re.Match` should return `default`')
+        self.assertEqual(traverse_obj(mobj, 8), None,
+                         msg='failing int key on a `re.Match` should return `default`')
+        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
+                         msg='function on a `re.Match` should give group name as well')
+
+
 if __name__ == '__main__':
     unittest.main()
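
The new test_traverse_obj above covers the query mini-language of yt_dlp.utils.traverse_obj (imported in the same diff). A few representative calls, restated from the assertions above:

    from yt_dlp.utils import traverse_obj

    data = {'urls': [{'index': 0, 'url': 'https://www.example.com/0'},
                     {'index': 1, 'url': 'https://www.example.com/1'}]}

    # A tuple is a path of keys/indices; `...` branches over all items
    assert traverse_obj(data, ('urls', 0, 'url')) == 'https://www.example.com/0'
    assert traverse_obj(data, ('urls', ..., 'url')) == [
        'https://www.example.com/0', 'https://www.example.com/1']
    # Failed lookups return `default` (None unless overridden)
    assert traverse_obj(data, ('urls', 3, 'url'), default='missing') == 'missing'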
test/test_youtube_lists.py:

@@ -10,6 +10,7 @@

 from test.helper import FakeYDL, is_download_test
 from yt_dlp.extractor import YoutubeIE, YoutubeTabIE
+from yt_dlp.utils import ExtractorError


 @is_download_test
@@ -53,6 +54,18 @@ def test_youtube_flat_playlist_extraction(self):
         self.assertEqual(video['duration'], 10)
         self.assertEqual(video['uploader'], 'Philipp Hagemeister')

+    def test_youtube_channel_no_uploads(self):
+        dl = FakeYDL()
+        dl.params['extract_flat'] = True
+        ie = YoutubeTabIE(dl)
+        # no uploads
+        with self.assertRaisesRegex(ExtractorError, r'no uploads'):
+            ie.extract('https://www.youtube.com/channel/UC2yXPzFejc422buOIzn_0CA')
+
+        # no uploads and no UCID given
+        with self.assertRaisesRegex(ExtractorError, r'no uploads'):
+            ie.extract('https://www.youtube.com/news')
+

 if __name__ == '__main__':
     unittest.main()
test/test_youtube_signature.py:

@@ -66,6 +66,10 @@
 ]

 _NSIG_TESTS = [
+    (
+        'https://www.youtube.com/s/player/7862ca1f/player_ias.vflset/en_US/base.js',
+        'X_LCxVDjAavgE5t', 'yxJ1dM6iz5ogUg',
+    ),
     (
         'https://www.youtube.com/s/player/9216d1f7/player_ias.vflset/en_US/base.js',
         'SLp9F5bwjAdhE9F-', 'gWnb9IK2DJ8Q1w',
@@ -102,6 +106,42 @@
         'https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js',
         'TDCstCG66tEAO5pR9o', 'dbxNtZ14c-yWyw',
     ),
+    (
+        'https://www.youtube.com/s/player/c81bbb4a/player_ias.vflset/en_US/base.js',
+        'gre3EcLurNY2vqp94', 'Z9DfGxWP115WTg',
+    ),
+    (
+        'https://www.youtube.com/s/player/1f7d5369/player_ias.vflset/en_US/base.js',
+        'batNX7sYqIJdkJ', 'IhOkL_zxbkOZBw',
+    ),
+    (
+        'https://www.youtube.com/s/player/009f1d77/player_ias.vflset/en_US/base.js',
+        '5dwFHw8aFWQUQtffRq', 'audescmLUzI3jw',
+    ),
+    (
+        'https://www.youtube.com/s/player/dc0c6770/player_ias.vflset/en_US/base.js',
+        '5EHDMgYLV6HPGk_Mu-kk', 'n9lUJLHbxUI0GQ',
+    ),
+    (
+        'https://www.youtube.com/s/player/113ca41c/player_ias.vflset/en_US/base.js',
+        'cgYl-tlYkhjT7A', 'hI7BBr2zUgcmMg',
+    ),
+    (
+        'https://www.youtube.com/s/player/c57c113c/player_ias.vflset/en_US/base.js',
+        'M92UUMHa8PdvPd3wyM', '3hPqLJsiNZx7yA',
+    ),
+    (
+        'https://www.youtube.com/s/player/5a3b6271/player_ias.vflset/en_US/base.js',
+        'B2j7f_UPT4rfje85Lu_e', 'm5DmNymaGQ5RdQ',
+    ),
+    (
+        'https://www.youtube.com/s/player/7a062b77/player_ias.vflset/en_US/base.js',
+        'NRcE3y3mVtm_cV-W', 'VbsCYUATvqlt5w',
+    ),
+    (
+        'https://www.youtube.com/s/player/dac945fd/player_ias.vflset/en_US/base.js',
+        'o8BkRxXhuYsBCWi6RplPdP', '3Lx32v_hmzTm6A',
+    ),
 ]
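
For orientation, each _NSIG_TESTS tuple added above pairs a YouTube player build with a sample "n" challenge and the value the extracted JS function must produce; broadly, the test fetches the player JS, extracts the n-descrambling function, and runs it through the JSInterpreter. The shape of an entry (restated from the list above):

    # (player JS URL, sample "n" input, expected descrambled output)
    ('https://www.youtube.com/s/player/dac945fd/player_ias.vflset/en_US/base.js',
     'o8BkRxXhuYsBCWi6RplPdP', '3Lx32v_hmzTm6A')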
test/testdata/ism/ec-3_test.Manifest (new vendored file, 1 line):

<?xml version="1.0" encoding="utf-8"?><!--Transformed by VSMT using XSL stylesheet for rule Identity--><!-- Created with Unified Streaming Platform (version=1.10.12-18737) --><SmoothStreamingMedia MajorVersion="2" MinorVersion="0" TimeScale="10000000" Duration="370000000"><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="127802" CodecPrivateData="1190" SamplingRate="48000" Channels="2" BitsPerSample="16" PacketSize="4" AudioTag="255" FourCC="AACL" /><c t="0" d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="7253333" /></StreamIndex><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu_1" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu_1={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="224000" CodecPrivateData="00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00" FourCCData="0700200F00" SamplingRate="48000" Channels="6" BitsPerSample="16" PacketSize="896" AudioTag="65534" FourCC="EC-3" /><c t="0" d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="8320000" /></StreamIndex><StreamIndex Type="video" QualityLevels="8" TimeScale="10000000" Language="deu" Name="video_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(video_deu={start time})?noStreamProfile=1" MaxWidth="1920" MaxHeight="1080" DisplayWidth="1920" DisplayHeight="1080"><QualityLevel Index="0" Bitrate="23909" CodecPrivateData="000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8" MaxWidth="384" MaxHeight="216" FourCC="AVC1" /><QualityLevel Index="1" Bitrate="403188" CodecPrivateData="00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2" MaxWidth="400" MaxHeight="224" FourCC="AVC1" /><QualityLevel Index="2" Bitrate="680365" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="3" Bitrate="1253465" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="4" Bitrate="2121558" CodecPrivateData="00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80" MaxWidth="768" MaxHeight="432" FourCC="AVC1" /><QualityLevel Index="5" Bitrate="3275545" CodecPrivateData="00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80" MaxWidth="1280" MaxHeight="720" FourCC="AVC1" /><QualityLevel Index="6" Bitrate="5300196" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><QualityLevel Index="7" Bitrate="8079312" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><c t="0" d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="10000000" /></StreamIndex></SmoothStreamingMedia>
test/testdata/yt_dlp_plugins/extractor/_ignore.py (new vendored file, 5 lines):

from yt_dlp.extractor.common import InfoExtractor


class IgnorePluginIE(InfoExtractor):
    pass

test/testdata/yt_dlp_plugins/extractor/ignore.py (new vendored file, 12 lines):

from yt_dlp.extractor.common import InfoExtractor


class IgnoreNotInAllPluginIE(InfoExtractor):
    pass


class InAllPluginIE(InfoExtractor):
    pass


__all__ = ['InAllPluginIE']

test/testdata/yt_dlp_plugins/extractor/normal.py (new vendored file, 9 lines):

from yt_dlp.extractor.common import InfoExtractor


class NormalPluginIE(InfoExtractor):
    pass


class _IgnoreUnderscorePluginIE(InfoExtractor):
    pass

test/testdata/yt_dlp_plugins/postprocessor/normal.py (new vendored file, 5 lines):

from yt_dlp.postprocessor.common import PostProcessor


class NormalPluginPP(PostProcessor):
    pass

test/testdata/zipped_plugins/yt_dlp_plugins/extractor/zipped.py (new vendored file, 5 lines):

from yt_dlp.extractor.common import InfoExtractor


class ZippedPluginIE(InfoExtractor):
    pass

test/testdata/zipped_plugins/yt_dlp_plugins/postprocessor/zipped.py (new vendored file, 5 lines):

from yt_dlp.postprocessor.common import PostProcessor


class ZippedPluginPP(PostProcessor):
    pass
[One file's diff is suppressed because it is too large.]
yt_dlp/__init__.py
@@ -16,11 +16,9 @@
 
 from .compat import compat_shlex_quote
 from .cookies import SUPPORTED_BROWSERS, SUPPORTED_KEYRINGS
-from .downloader import FileDownloader
 from .downloader.external import get_external_downloader
 from .extractor import list_extractor_classes
 from .extractor.adobepass import MSO_INFO
-from .extractor.common import InfoExtractor
 from .options import parseOpts
 from .postprocessor import (
     FFmpegExtractAudioPP,
@@ -40,6 +38,7 @@
     DateRange,
     DownloadCancelled,
     DownloadError,
+    FormatSorter,
     GeoUtils,
     PlaylistEntries,
     SameFileError,
@@ -50,6 +49,7 @@
     format_field,
     int_or_none,
     match_filter_func,
+    parse_bytes,
     parse_duration,
     preferredencoding,
     read_batch_urls,
@@ -63,6 +63,8 @@
 )
 from .YoutubeDL import YoutubeDL
 
+_IN_CLI = False
+
 
 def _exit(status=0, *args):
     for msg in args:
@@ -89,12 +91,11 @@ def get_urls(urls, batchfile, verbose):
 
 
 def print_extractor_information(opts, urls):
-    # Importing GenericIE is currently slow since it imports other extractors
-    # TODO: Move this back to module level after generalization of embed detection
-    from .extractor.generic import GenericIE
-
     out = ''
     if opts.list_extractors:
+        # Importing GenericIE is currently slow since it imports YoutubeIE
+        from .extractor.generic import GenericIE
+
         urls = dict.fromkeys(urls, False)
         for ie in list_extractor_classes(opts.age_limit):
             out += ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie.working() else '') + '\n'
@@ -150,7 +151,7 @@ def set_default_compat(compat_name, opt_name, default=True, remove_compat=True):
     else:
         opts.embed_infojson = False
     if 'format-sort' in opts.compat_opts:
-        opts.format_sort.extend(InfoExtractor.FormatSort.ytdl_default)
+        opts.format_sort.extend(FormatSorter.ytdl_default)
     _video_multistreams_set = set_default_compat('multistreams', 'allow_multiple_video_streams', False, remove_compat=False)
     _audio_multistreams_set = set_default_compat('multistreams', 'allow_multiple_audio_streams', False, remove_compat=False)
     if _video_multistreams_set is False and _audio_multistreams_set is False:
@@ -225,7 +226,7 @@ def validate_minmax(min_val, max_val, min_name, max_name=None):
 
     # Format sort
     for f in opts.format_sort:
-        validate_regex('format sorting', f, InfoExtractor.FormatSort.regex)
+        validate_regex('format sorting', f, FormatSorter.regex)
 
     # Postprocessor formats
     validate_regex('merge output format', opts.merge_output_format,
@@ -279,19 +280,19 @@ def parse_sleep_func(expr):
             raise ValueError(f'invalid {key} retry sleep expression {expr!r}')
 
     # Bytes
-    def parse_bytes(name, value):
+    def validate_bytes(name, value):
         if value is None:
             return None
-        numeric_limit = FileDownloader.parse_bytes(value)
+        numeric_limit = parse_bytes(value)
         validate(numeric_limit is not None, 'rate limit', value)
         return numeric_limit
 
-    opts.ratelimit = parse_bytes('rate limit', opts.ratelimit)
-    opts.throttledratelimit = parse_bytes('throttled rate limit', opts.throttledratelimit)
-    opts.min_filesize = parse_bytes('min filesize', opts.min_filesize)
-    opts.max_filesize = parse_bytes('max filesize', opts.max_filesize)
-    opts.buffersize = parse_bytes('buffer size', opts.buffersize)
-    opts.http_chunk_size = parse_bytes('http chunk size', opts.http_chunk_size)
+    opts.ratelimit = validate_bytes('rate limit', opts.ratelimit)
+    opts.throttledratelimit = validate_bytes('throttled rate limit', opts.throttledratelimit)
+    opts.min_filesize = validate_bytes('min filesize', opts.min_filesize)
+    opts.max_filesize = validate_bytes('max filesize', opts.max_filesize)
+    opts.buffersize = validate_bytes('buffer size', opts.buffersize)
+    opts.http_chunk_size = validate_bytes('http chunk size', opts.http_chunk_size)
 
     # Output templates
     def validate_outtmpl(tmpl, msg):
@@ -317,21 +318,18 @@ def validate_outtmpl(tmpl, msg):
     if outtmpl_default == '':
         opts.skip_download = None
         del opts.outtmpl['default']
-    if outtmpl_default and not os.path.splitext(outtmpl_default)[1] and opts.extractaudio:
-        raise ValueError(
-            'Cannot download a video and extract audio into the same file! '
-            f'Use "{outtmpl_default}.%(ext)s" instead of "{outtmpl_default}" as the output template')
 
     def parse_chapters(name, value):
         chapters, ranges = [], []
+        parse_timestamp = lambda x: float('inf') if x in ('inf', 'infinite') else parse_duration(x)
         for regex in value or []:
             if regex.startswith('*'):
-                for range in regex[1:].split(','):
-                    dur = tuple(map(parse_duration, range.strip().split('-')))
-                    if len(dur) == 2 and all(t is not None for t in dur):
-                        ranges.append(dur)
-                    else:
-                        raise ValueError(f'invalid {name} time range "{regex}". Must be of the form *start-end')
+                for range_ in map(str.strip, regex[1:].split(',')):
+                    mobj = range_ != '-' and re.fullmatch(r'([^-]+)?\s*-\s*([^-]+)?', range_)
+                    dur = mobj and (parse_timestamp(mobj.group(1) or '0'), parse_timestamp(mobj.group(2) or 'inf'))
+                    if None in (dur or [None]):
+                        raise ValueError(f'invalid {name} time range "{regex}". Must be of the form "*start-end"')
+                    ranges.append(dur)
                 continue
             try:
                 chapters.append(re.compile(regex))
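The rewritten range parser also accepts open-ended forms such as `*90-` and `*-inf`, since a missing start now defaults to 0 and a missing end to infinity. A standalone sketch of the same logic (yt-dlp's `parse_duration` is replaced by `float()` here so the snippet runs on its own; only plain second counts are accepted):

import re


def parse_timestamp(x):
    # stand-in for yt_dlp.utils.parse_duration
    return float('inf') if x in ('inf', 'infinite') else float(x)


def parse_ranges(value):
    ranges = []
    for range_ in map(str.strip, value.split(',')):
        mobj = range_ != '-' and re.fullmatch(r'([^-]+)?\s*-\s*([^-]+)?', range_)
        dur = mobj and (parse_timestamp(mobj.group(1) or '0'),
                        parse_timestamp(mobj.group(2) or 'inf'))
        if None in (dur or [None]):  # no match at all -> invalid range
            raise ValueError(f'invalid time range {range_!r}')
        ranges.append(dur)
    return ranges


print(parse_ranges('10-20, 90-, -inf'))
# [(10.0, 20.0), (90.0, inf), (0.0, inf)]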
@@ -344,10 +342,16 @@ def parse_chapters(name, value):
 
     # Cookies from browser
     if opts.cookiesfrombrowser:
-        mobj = re.match(r'(?P<name>[^+:]+)(\s*\+\s*(?P<keyring>[^:]+))?(\s*:(?P<profile>.+))?', opts.cookiesfrombrowser)
+        container = None
+        mobj = re.fullmatch(r'''(?x)
+            (?P<name>[^+:]+)
+            (?:\s*\+\s*(?P<keyring>[^:]+))?
+            (?:\s*:\s*(?!:)(?P<profile>.+?))?
+            (?:\s*::\s*(?P<container>.+))?
+        ''', opts.cookiesfrombrowser)
         if mobj is None:
             raise ValueError(f'invalid cookies from browser arguments: {opts.cookiesfrombrowser}')
-        browser_name, keyring, profile = mobj.group('name', 'keyring', 'profile')
+        browser_name, keyring, profile, container = mobj.group('name', 'keyring', 'profile', 'container')
         browser_name = browser_name.lower()
         if browser_name not in SUPPORTED_BROWSERS:
             raise ValueError(f'unsupported browser specified for cookies: "{browser_name}". '
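The new `re.fullmatch` pattern parses `--cookies-from-browser` specifications of the form BROWSER[+KEYRING][:PROFILE][::CONTAINER]; the `(?!:)` lookahead stops the profile group from swallowing a `::container` suffix. A quick demonstration of just the regex:

import re

spec_re = re.compile(r'''(?x)
    (?P<name>[^+:]+)
    (?:\s*\+\s*(?P<keyring>[^:]+))?
    (?:\s*:\s*(?!:)(?P<profile>.+?))?
    (?:\s*::\s*(?P<container>.+))?
''')

for spec in ('firefox', 'chrome+gnomekeyring', 'firefox:default', 'firefox::Work'):
    print(spec, '->', spec_re.fullmatch(spec).group('name', 'keyring', 'profile', 'container'))
# firefox -> ('firefox', None, None, None)
# chrome+gnomekeyring -> ('chrome', 'gnomekeyring', None, None)
# firefox:default -> ('firefox', None, 'default', None)
# firefox::Work -> ('firefox', None, None, 'Work')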
@@ -357,7 +361,7 @@ def parse_chapters(name, value):
         if keyring not in SUPPORTED_KEYRINGS:
             raise ValueError(f'unsupported keyring specified for cookies: "{keyring}". '
                              f'Supported keyrings are: {", ".join(sorted(SUPPORTED_KEYRINGS))}')
-        opts.cookiesfrombrowser = (browser_name, profile, keyring)
+        opts.cookiesfrombrowser = (browser_name, profile, keyring, container)
 
     # MetadataParser
     def metadataparser_actions(f):
@@ -378,10 +382,12 @@ def metadataparser_actions(f):
                 raise ValueError(f'{cmd} is invalid; {err}')
             yield action
 
-    parse_metadata = opts.parse_metadata or []
     if opts.metafromtitle is not None:
-        parse_metadata.append('title:%s' % opts.metafromtitle)
-    opts.parse_metadata = list(itertools.chain(*map(metadataparser_actions, parse_metadata)))
+        opts.parse_metadata.setdefault('pre_process', []).append('title:%s' % opts.metafromtitle)
+    opts.parse_metadata = {
+        k: list(itertools.chain(*map(metadataparser_actions, v)))
+        for k, v in opts.parse_metadata.items()
+    }
 
     # Other options
     if opts.playlist_items is not None:
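`opts.parse_metadata` is now a dict keyed by the postprocessor stage ("when"), so `--parse-metadata` actions can be attached to different points of the pipeline instead of always running at `pre_process`. An illustrative sketch of the resulting shape (the interpreter strings below are examples, not taken from this diff):

# Shape of opts.parse_metadata after the change: stage -> list of actions
opts_parse_metadata = {
    'pre_process': ['title:%(artist)s - %(title)s'],
    'post_process': ['description:(?P<meta_comment>.+)'],
}

# get_postprocessors() (further down in this diff) then emits one
# MetadataParser per stage:
for when, actions in opts_parse_metadata.items():
    print({'key': 'MetadataParser', 'actions': actions, 'when': when})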
@@ -397,11 +403,14 @@ def metadataparser_actions(f):
         except Exception:
             raise ValueError('unsupported geo-bypass country or ip-block')
 
-    opts.match_filter = match_filter_func(opts.match_filter)
+    opts.match_filter = match_filter_func(opts.match_filter, opts.breaking_match_filter)
 
     if opts.download_archive is not None:
         opts.download_archive = expand_path(opts.download_archive)
 
+    if opts.ffmpeg_location is not None:
+        opts.ffmpeg_location = expand_path(opts.ffmpeg_location)
+
     if opts.user_agent is not None:
         opts.headers.setdefault('User-Agent', opts.user_agent)
     if opts.referer is not None:
@@ -477,7 +486,7 @@ def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_u
         val1=opts.sponskrub and opts.sponskrub_cut)
 
     # Conflicts with --allow-unplayable-formats
-    report_conflict('--add-metadata', 'addmetadata')
+    report_conflict('--embed-metadata', 'addmetadata')
     report_conflict('--embed-chapters', 'addchapters')
     report_conflict('--embed-info-json', 'embed_infojson')
     report_conflict('--embed-subs', 'embedsubtitles')
@@ -550,11 +559,11 @@ def report_deprecation(val, old, new=None):
 def get_postprocessors(opts):
     yield from opts.add_postprocessors
 
-    if opts.parse_metadata:
+    for when, actions in opts.parse_metadata.items():
         yield {
             'key': 'MetadataParser',
-            'actions': opts.parse_metadata,
-            'when': 'pre_process'
+            'actions': actions,
+            'when': when
         }
     sponsorblock_query = opts.sponsorblock_mark | opts.sponsorblock_remove
     if sponsorblock_query:
@@ -690,11 +699,12 @@ def parse_options(argv=None):
 
     postprocessors = list(get_postprocessors(opts))
 
-    print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[2:])
+    print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[3:])
     any_getting = any(getattr(opts, k) for k in (
         'dumpjson', 'dump_single_json', 'getdescription', 'getduration', 'getfilename',
         'getformat', 'getid', 'getthumbnail', 'gettitle', 'geturl'
     ))
+    opts.quiet = opts.quiet or any_getting or opts.print_json or bool(opts.forceprint)
 
     playlist_pps = [pp for pp in postprocessors if pp.get('when') == 'playlist']
     write_playlist_infojson = (opts.writeinfojson and not opts.clean_infojson
@@ -730,7 +740,7 @@ def parse_options(argv=None):
         'client_certificate': opts.client_certificate,
         'client_certificate_key': opts.client_certificate_key,
         'client_certificate_password': opts.client_certificate_password,
-        'quiet': opts.quiet or any_getting or opts.print_json or bool(opts.forceprint),
+        'quiet': opts.quiet,
         'no_warnings': opts.no_warnings,
         'forceurl': opts.geturl,
         'forcetitle': opts.gettitle,
@@ -766,6 +776,7 @@ def parse_options(argv=None):
         'windowsfilenames': opts.windowsfilenames,
         'ignoreerrors': opts.ignoreerrors,
         'force_generic_extractor': opts.force_generic_extractor,
+        'allowed_extractors': opts.allowed_extractors or ['default'],
         'ratelimit': opts.ratelimit,
         'throttledratelimit': opts.throttledratelimit,
         'overwrites': opts.overwrites,
@@ -841,6 +852,7 @@ def parse_options(argv=None):
         'legacyserverconnect': opts.legacy_server_connect,
         'nocheckcertificate': opts.no_check_certificate,
         'prefer_insecure': opts.prefer_insecure,
+        'enable_file_urls': opts.enable_file_urls,
         'http_headers': opts.headers,
         'proxy': opts.proxy,
         'socket_timeout': opts.socket_timeout,
@@ -919,7 +931,7 @@ def _real_main(argv=None):
         if opts.rm_cachedir:
             ydl.cache.remove()
 
-        updater = Updater(ydl)
+        updater = Updater(ydl, opts.update_self if isinstance(opts.update_self, str) else None)
         if opts.update_self and updater.update() and actual_use:
             if updater.cmd:
                 return updater.restart()
@@ -949,6 +961,8 @@ def _real_main(argv=None):
 
 
 def main(argv=None):
+    global _IN_CLI
+    _IN_CLI = True
     try:
         _exit(*variadic(_real_main(argv)))
     except DownloadError:
yt_dlp/__main__.py
@@ -5,7 +5,7 @@
 
 import sys
 
-if __package__ is None and not hasattr(sys, 'frozen'):
+if __package__ is None and not getattr(sys, 'frozen', False):
     # direct call of __main__.py
     import os.path
     path = os.path.realpath(os.path.abspath(__file__))
yt_dlp/__pyinstaller/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+import os
+
+
+def get_hook_dirs():
+    return [os.path.dirname(__file__)]
yt_dlp/__pyinstaller/hook-yt_dlp.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+import sys
+
+from PyInstaller.utils.hooks import collect_submodules
+
+
+def pycryptodome_module():
+    try:
+        import Cryptodome  # noqa: F401
+    except ImportError:
+        try:
+            import Crypto  # noqa: F401
+            print('WARNING: Using Crypto since Cryptodome is not available. '
+                  'Install with: pip install pycryptodomex', file=sys.stderr)
+            return 'Crypto'
+        except ImportError:
+            pass
+    return 'Cryptodome'
+
+
+def get_hidden_imports():
+    yield 'yt_dlp.compat._legacy'
+    yield pycryptodome_module()
+    yield from collect_submodules('websockets')
+    # These are auto-detected, but explicitly add them just in case
+    yield from ('mutagen', 'brotli', 'certifi')
+
+
+hiddenimports = list(get_hidden_imports())
+print(f'Adding imports: {hiddenimports}')
+
+excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts']
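PyInstaller discovers third-party hook directories through the `pyinstaller40` entry-point group, which is presumably how `get_hook_dirs()` above gets picked up at build time. A hedged packaging sketch of that wiring (illustrative, not copied from this repository's setup.py):

from setuptools import setup

setup(
    name='yt-dlp',
    # The documented PyInstaller mechanism for shipping hooks inside a package
    entry_points={
        'pyinstaller40': ['hook-dirs = yt_dlp.__pyinstaller:get_hook_dirs'],
    },
)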
yt_dlp/aes.py
@@ -2,17 +2,17 @@
 from math import ceil
 
 from .compat import compat_ord
-from .dependencies import Cryptodome_AES
+from .dependencies import Cryptodome
 from .utils import bytes_to_intlist, intlist_to_bytes
 
-if Cryptodome_AES:
+if Cryptodome.AES:
     def aes_cbc_decrypt_bytes(data, key, iv):
         """ Decrypt bytes with AES-CBC using pycryptodome """
-        return Cryptodome_AES.new(key, Cryptodome_AES.MODE_CBC, iv).decrypt(data)
+        return Cryptodome.AES.new(key, Cryptodome.AES.MODE_CBC, iv).decrypt(data)
 
     def aes_gcm_decrypt_and_verify_bytes(data, key, tag, nonce):
         """ Decrypt bytes with AES-GCM using pycryptodome """
-        return Cryptodome_AES.new(key, Cryptodome_AES.MODE_GCM, nonce).decrypt_and_verify(data, tag)
+        return Cryptodome.AES.new(key, Cryptodome.AES.MODE_GCM, nonce).decrypt_and_verify(data, tag)
 
 else:
     def aes_cbc_decrypt_bytes(data, key, iv):
@@ -28,11 +28,23 @@ def aes_cbc_encrypt_bytes(data, key, iv, **kwargs):
     return intlist_to_bytes(aes_cbc_encrypt(*map(bytes_to_intlist, (data, key, iv)), **kwargs))
 
 
+BLOCK_SIZE_BYTES = 16
+
+
 def unpad_pkcs7(data):
     return data[:-compat_ord(data[-1])]
 
 
-BLOCK_SIZE_BYTES = 16
+def pkcs7_padding(data):
+    """
+    PKCS#7 padding
+
+    @param {int[]} data cleartext
+    @returns {int[]} padding data
+    """
+
+    remaining_length = BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES
+    return data + [remaining_length] * remaining_length
 
 
 def pad_block(block, padding_mode):
@@ -64,7 +76,7 @@ def pad_block(block, padding_mode):
 
 def aes_ecb_encrypt(data, key, iv=None):
     """
-    Encrypt with aes in ECB mode
+    Encrypt with aes in ECB mode. Using PKCS#7 padding
 
     @param {int[]} data cleartext
     @param {int[]} key 16/24/32-Byte cipher key
@@ -77,8 +89,7 @@ def aes_ecb_encrypt(data, key, iv=None):
     encrypted_data = []
     for i in range(block_count):
         block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
-        encrypted_data += aes_encrypt(block, expanded_key)
-    encrypted_data = encrypted_data[:len(data)]
+        encrypted_data += aes_encrypt(pkcs7_padding(block), expanded_key)
 
     return encrypted_data
 
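`pkcs7_padding` always pads up to a whole block, so a 16-byte input grows by a full extra block of 0x10 bytes; that is also why the old `encrypted_data[:len(data)]` truncation had to be dropped. A standalone check of the two helpers (operating on int lists, as in yt_dlp.aes):

BLOCK_SIZE_BYTES = 16


def pkcs7_padding(data):
    remaining_length = BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES
    return data + [remaining_length] * remaining_length


def unpad_pkcs7(data):
    return data[:-data[-1]]  # compat_ord is a no-op on int lists


padded = pkcs7_padding(list(b'hello'))
assert padded[5:] == [11] * 11                    # 5 input bytes -> 11 bytes of 0x0b
assert pkcs7_padding(list(range(16)))[-1] == 16   # a full block gains a whole pad block
assert unpad_pkcs7(padded) == list(b'hello')
print('ok')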
@@ -551,5 +562,6 @@ def ghash(subkey, data):
 
     'key_expansion',
     'pad_block',
+    'pkcs7_padding',
     'unpad_pkcs7',
 ]
yt_dlp/cache.py
@@ -1,12 +1,13 @@
 import contextlib
-import errno
 import json
 import os
 import re
 import shutil
 import traceback
+import urllib.parse
 
-from .utils import expand_path, write_json_file
+from .utils import expand_path, traverse_obj, version_tuple, write_json_file
+from .version import __version__
 
 
 class Cache:
@@ -21,11 +22,9 @@ def _get_root_dir(self):
         return expand_path(res)
 
     def _get_cache_fn(self, section, key, dtype):
-        assert re.match(r'^[a-zA-Z0-9_.-]+$', section), \
-            'invalid section %r' % section
-        assert re.match(r'^[a-zA-Z0-9_.-]+$', key), 'invalid key %r' % key
-        return os.path.join(
-            self._get_root_dir(), section, f'{key}.{dtype}')
+        assert re.match(r'^[\w.-]+$', section), f'invalid section {section!r}'
+        key = urllib.parse.quote(key, safe='').replace('%', ',')  # encode non-ascii characters
+        return os.path.join(self._get_root_dir(), section, f'{key}.{dtype}')
 
     @property
     def enabled(self):
@@ -39,18 +38,22 @@ def store(self, section, key, data, dtype='json'):
 
         fn = self._get_cache_fn(section, key, dtype)
         try:
-            try:
-                os.makedirs(os.path.dirname(fn))
-            except OSError as ose:
-                if ose.errno != errno.EEXIST:
-                    raise
+            os.makedirs(os.path.dirname(fn), exist_ok=True)
             self._ydl.write_debug(f'Saving {section}.{key} to cache')
-            write_json_file(data, fn)
+            write_json_file({'yt-dlp_version': __version__, 'data': data}, fn)
         except Exception:
             tb = traceback.format_exc()
             self._ydl.report_warning(f'Writing cache to {fn!r} failed: {tb}')
 
-    def load(self, section, key, dtype='json', default=None):
+    def _validate(self, data, min_ver):
+        version = traverse_obj(data, 'yt-dlp_version')
+        if not version:  # Backward compatibility
+            data, version = {'data': data}, '2022.08.19'
+        if not min_ver or version_tuple(version) >= version_tuple(min_ver):
+            return data['data']
+        self._ydl.write_debug(f'Discarding old cache from version {version} (needs {min_ver})')
+
+    def load(self, section, key, dtype='json', default=None, *, min_ver=None):
         assert dtype in ('json',)
 
         if not self.enabled:
@@ -61,8 +64,8 @@ def load(self, section, key, dtype='json', default=None):
         try:
             with open(cache_fn, encoding='utf-8') as cachef:
                 self._ydl.write_debug(f'Loading {section}.{key} from cache')
-                return json.load(cachef)
-        except ValueError:
+                return self._validate(json.load(cachef), min_ver)
+        except (ValueError, KeyError):
             try:
                 file_size = os.path.getsize(cache_fn)
             except OSError as oe:
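Cache entries are now wrapped as {'yt-dlp_version': ..., 'data': ...} so callers can pass `min_ver` to `load()` and have entries written by older versions silently discarded; unwrapped legacy entries are treated as version 2022.08.19. A standalone sketch of the gate (version_tuple is reimplemented trivially and the debug logging is dropped):

def version_tuple(v):
    return tuple(int(x) for x in v.split('.'))


def validate(data, min_ver=None):
    version = data.get('yt-dlp_version') if isinstance(data, dict) else None
    if not version:  # backward compatibility with unwrapped entries
        data, version = {'data': data}, '2022.08.19'
    if not min_ver or version_tuple(version) >= version_tuple(min_ver):
        return data['data']
    return None  # caller falls back to its default


wrapped = {'yt-dlp_version': '2023.03.03', 'data': {'player': 'abc'}}
assert validate(wrapped, min_ver='2022.09.01') == {'player': 'abc'}
assert validate([1, 2, 3]) == [1, 2, 3]                    # legacy entry, no gate
assert validate([1, 2, 3], min_ver='2023.01.01') is None   # legacy entry too old
print('ok')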
yt_dlp/compat/__init__.py
@@ -8,13 +8,13 @@
 
 # XXX: Implement this the same way as other DeprecationWarnings without circular import
 passthrough_module(__name__, '._legacy', callback=lambda attr: warnings.warn(
-    DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=3))
+    DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=5))
 
 
 # HTMLParseError has been deprecated in Python 3.3 and removed in
 # Python 3.5. Introducing dummy exception for Python >3.5 for compatible
 # and uniform cross-version exception handling
-class compat_HTMLParseError(Exception):
+class compat_HTMLParseError(ValueError):
     pass
 
 
@@ -70,9 +70,3 @@ def compat_expanduser(path):
         return userhome + path[i:]
 else:
     compat_expanduser = os.path.expanduser
-
-
-# NB: Add modules that are imported dynamically here so that PyInstaller can find them
-# See https://github.com/pyinstaller/pyinstaller-hooks-contrib/issues/438
-if False:
-    from . import _legacy  # noqa: F401
yt_dlp/compat/_legacy.py
@@ -1,5 +1,6 @@
 """ Do not use! """
 
+import base64
 import collections
 import ctypes
 import getpass
@@ -29,10 +30,11 @@
 from re import Pattern as compat_Pattern  # noqa: F401
 from re import match as compat_Match  # noqa: F401
 
+from . import compat_expanduser, compat_HTMLParseError, compat_realpath
 from .compat_utils import passthrough_module
-from ..dependencies import Cryptodome_AES as compat_pycrypto_AES  # noqa: F401
 from ..dependencies import brotli as compat_brotli  # noqa: F401
 from ..dependencies import websockets as compat_websockets  # noqa: F401
+from ..dependencies.Cryptodome import AES as compat_pycrypto_AES  # noqa: F401
 
 passthrough_module(__name__, '...utils', ('WINDOWS_VT_MODE', 'windows_enable_vt_mode'))
 
@@ -47,22 +49,25 @@ def compat_setenv(key, value, env=os.environ):
     env[key] = value
 
 
+compat_base64_b64decode = base64.b64decode
 compat_basestring = str
+compat_casefold = str.casefold
 compat_chr = chr
 compat_collections_abc = collections.abc
-compat_cookiejar = http.cookiejar
-compat_cookiejar_Cookie = http.cookiejar.Cookie
-compat_cookies = http.cookies
-compat_cookies_SimpleCookie = http.cookies.SimpleCookie
-compat_etree_Element = etree.Element
-compat_etree_register_namespace = etree.register_namespace
+compat_cookiejar = compat_http_cookiejar = http.cookiejar
+compat_cookiejar_Cookie = compat_http_cookiejar_Cookie = http.cookiejar.Cookie
+compat_cookies = compat_http_cookies = http.cookies
+compat_cookies_SimpleCookie = compat_http_cookies_SimpleCookie = http.cookies.SimpleCookie
+compat_etree_Element = compat_xml_etree_ElementTree_Element = etree.Element
+compat_etree_register_namespace = compat_xml_etree_register_namespace = etree.register_namespace
 compat_filter = filter
 compat_get_terminal_size = shutil.get_terminal_size
 compat_getenv = os.getenv
-compat_getpass = getpass.getpass
+compat_getpass = compat_getpass_getpass = getpass.getpass
 compat_html_entities = html.entities
 compat_html_entities_html5 = html.entities.html5
-compat_HTMLParser = html.parser.HTMLParser
+compat_html_parser_HTMLParseError = compat_HTMLParseError
+compat_HTMLParser = compat_html_parser_HTMLParser = html.parser.HTMLParser
 compat_http_client = http.client
 compat_http_server = http.server
 compat_input = input
@@ -71,6 +76,8 @@ def compat_setenv(key, value, env=os.environ):
 compat_kwargs = lambda kwargs: kwargs
 compat_map = map
 compat_numeric_types = (int, float, complex)
+compat_os_path_expanduser = compat_expanduser
+compat_os_path_realpath = compat_realpath
 compat_print = print
 compat_shlex_split = shlex.split
 compat_socket_create_connection = socket.create_connection
@@ -80,7 +87,9 @@ def compat_setenv(key, value, env=os.environ):
 compat_subprocess_get_DEVNULL = lambda: DEVNULL
 compat_tokenize_tokenize = tokenize.tokenize
 compat_urllib_error = urllib.error
+compat_urllib_HTTPError = urllib.error.HTTPError
 compat_urllib_parse = urllib.parse
+compat_urllib_parse_parse_qs = urllib.parse.parse_qs
 compat_urllib_parse_quote = urllib.parse.quote
 compat_urllib_parse_quote_plus = urllib.parse.quote_plus
 compat_urllib_parse_unquote_plus = urllib.parse.unquote_plus
@@ -89,8 +98,10 @@ def compat_setenv(key, value, env=os.environ):
 compat_urllib_request = urllib.request
 compat_urllib_request_DataHandler = urllib.request.DataHandler
 compat_urllib_response = urllib.response
-compat_urlretrieve = urllib.request.urlretrieve
-compat_xml_parse_error = etree.ParseError
+compat_urlretrieve = compat_urllib_request_urlretrieve = urllib.request.urlretrieve
+compat_xml_parse_error = compat_xml_etree_ElementTree_ParseError = etree.ParseError
 compat_xpath = lambda xpath: xpath
 compat_zip = zip
 workaround_optparse_bug9161 = lambda: None
+
+legacy = []
yt_dlp/compat/compat_utils.py
@@ -1,5 +1,6 @@
 import collections
 import contextlib
+import functools
 import importlib
 import sys
 import types
@@ -10,61 +11,73 @@
 
 
 def get_package_info(module):
-    parent = module.__name__.split('.')[0]
-    parent_module = None
-    with contextlib.suppress(ImportError):
-        parent_module = importlib.import_module(parent)
-
-    for attr in ('__version__', 'version_string', 'version'):
-        version = getattr(parent_module, attr, None)
-        if version is not None:
-            break
-    return _Package(getattr(module, '_yt_dlp__identifier', parent), str(version))
+    return _Package(
+        name=getattr(module, '_yt_dlp__identifier', module.__name__),
+        version=str(next(filter(None, (
+            getattr(module, attr, None)
+            for attr in ('__version__', 'version_string', 'version')
+        )), None)))
 
 
 def _is_package(module):
-    try:
-        module.__getattribute__('__path__')
-    except AttributeError:
-        return False
-    return True
+    return '__path__' in vars(module)
 
 
-def passthrough_module(parent, child, allowed_attributes=None, *, callback=lambda _: None):
-    parent_module = importlib.import_module(parent)
-    child_module = None  # Import child module only as needed
-
-    class PassthroughModule(types.ModuleType):
-        def __getattr__(self, attr):
-            if _is_package(parent_module):
-                with contextlib.suppress(ImportError):
-                    return importlib.import_module(f'.{attr}', parent)
-
-            ret = self.__from_child(attr)
-            if ret is _NO_ATTRIBUTE:
-                raise AttributeError(f'module {parent} has no attribute {attr}')
-            callback(attr)
-            return ret
-
-        def __from_child(self, attr):
-            if allowed_attributes is None:
-                if attr.startswith('__') and attr.endswith('__'):
-                    return _NO_ATTRIBUTE
-            elif attr not in allowed_attributes:
-                return _NO_ATTRIBUTE
-
-            nonlocal child_module
-            child_module = child_module or importlib.import_module(child, parent)
-
-            with contextlib.suppress(AttributeError):
-                return getattr(child_module, attr)
-
-            if _is_package(child_module):
-                with contextlib.suppress(ImportError):
-                    return importlib.import_module(f'.{attr}', child)
-
-            return _NO_ATTRIBUTE
-
-    # Python 3.6 does not have module level __getattr__
-    # https://peps.python.org/pep-0562/
-    sys.modules[parent].__class__ = PassthroughModule
+def _is_dunder(name):
+    return name.startswith('__') and name.endswith('__')
+
+
+class EnhancedModule(types.ModuleType):
+    def __bool__(self):
+        return vars(self).get('__bool__', lambda: True)()
+
+    def __getattribute__(self, attr):
+        try:
+            ret = super().__getattribute__(attr)
+        except AttributeError:
+            if _is_dunder(attr):
+                raise
+            getter = getattr(self, '__getattr__', None)
+            if not getter:
+                raise
+            ret = getter(attr)
+        return ret.fget() if isinstance(ret, property) else ret
+
+
+def passthrough_module(parent, child, allowed_attributes=(..., ), *, callback=lambda _: None):
+    """Passthrough parent module into a child module, creating the parent if necessary"""
+    def __getattr__(attr):
+        if _is_package(parent):
+            with contextlib.suppress(ModuleNotFoundError):
+                return importlib.import_module(f'.{attr}', parent.__name__)
+
+        ret = from_child(attr)
+        if ret is _NO_ATTRIBUTE:
+            raise AttributeError(f'module {parent.__name__} has no attribute {attr}')
+        callback(attr)
+        return ret
+
+    @functools.lru_cache(maxsize=None)
+    def from_child(attr):
+        nonlocal child
+        if attr not in allowed_attributes:
+            if ... not in allowed_attributes or _is_dunder(attr):
+                return _NO_ATTRIBUTE
+
+        if isinstance(child, str):
+            child = importlib.import_module(child, parent.__name__)
+
+        if _is_package(child):
+            with contextlib.suppress(ImportError):
+                return passthrough_module(f'{parent.__name__}.{attr}',
+                                          importlib.import_module(f'.{attr}', child.__name__))
+
+        with contextlib.suppress(AttributeError):
+            return getattr(child, attr)
+
+        return _NO_ATTRIBUTE
+
+    parent = sys.modules.get(parent, types.ModuleType(parent))
+    parent.__class__ = EnhancedModule
+    parent.__getattr__ = __getattr__
+    return parent
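The rewrite makes `passthrough_module` create the parent module on demand and return it, so a shim module can be built in a couple of lines; `yt_dlp/compat/shutil.py` below is the first in-tree user. A hedged minimal sketch against the new API (the module name `myshim` is invented for illustration):

from yt_dlp.compat.compat_utils import passthrough_module

shim = passthrough_module('myshim', 'json')  # parent module created on demand
print(shim.dumps({'ok': True}))              # unknown attributes fall through to json
# {"ok": true}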
yt_dlp/compat/shutil.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+# flake8: noqa: F405
+from shutil import *  # noqa: F403
+
+from .compat_utils import passthrough_module
+
+passthrough_module(__name__, 'shutil')
+del passthrough_module
+
+
+import sys
+
+if sys.platform.startswith('freebsd'):
+    import errno
+    import os
+    import shutil
+
+    # Workaround for PermissionError when using restricted ACL mode on FreeBSD
+    def copy2(src, dst, *args, **kwargs):
+        if os.path.isdir(dst):
+            dst = os.path.join(dst, os.path.basename(src))
+        shutil.copyfile(src, dst, *args, **kwargs)
+        try:
+            shutil.copystat(src, dst, *args, **kwargs)
+        except PermissionError as e:
+            if e.errno != getattr(errno, 'EPERM', None):
+                raise
+        return dst
+
+    def move(*args, copy_function=copy2, **kwargs):
+        return shutil.move(*args, copy_function=copy_function, **kwargs)
yt_dlp/cookies.py
@@ -1,8 +1,10 @@
 import base64
 import contextlib
 import http.cookiejar
+import http.cookies
 import json
 import os
+import re
 import shutil
 import struct
 import subprocess
@@ -24,7 +26,14 @@
     sqlite3,
 )
 from .minicurses import MultilinePrinter, QuietMultilinePrinter
-from .utils import Popen, YoutubeDLCookieJar, error_to_str, expand_path
+from .utils import (
+    Popen,
+    YoutubeDLCookieJar,
+    error_to_str,
+    expand_path,
+    is_path_like,
+    try_call,
+)
 
 CHROMIUM_BASED_BROWSERS = {'brave', 'chrome', 'chromium', 'edge', 'opera', 'vivaldi'}
 SUPPORTED_BROWSERS = CHROMIUM_BASED_BROWSERS | {'firefox', 'safari'}
@@ -85,11 +94,12 @@ def _create_progress_bar(logger):
 def load_cookies(cookie_file, browser_specification, ydl):
     cookie_jars = []
     if browser_specification is not None:
-        browser_name, profile, keyring = _parse_browser_specification(*browser_specification)
-        cookie_jars.append(extract_cookies_from_browser(browser_name, profile, YDLLogger(ydl), keyring=keyring))
+        browser_name, profile, keyring, container = _parse_browser_specification(*browser_specification)
+        cookie_jars.append(
+            extract_cookies_from_browser(browser_name, profile, YDLLogger(ydl), keyring=keyring, container=container))
 
     if cookie_file is not None:
-        is_filename = YoutubeDLCookieJar.is_path(cookie_file)
+        is_filename = is_path_like(cookie_file)
         if is_filename:
             cookie_file = expand_path(cookie_file)
 
@@ -101,9 +111,9 @@ def load_cookies(cookie_file, browser_specification, ydl):
     return _merge_cookie_jars(cookie_jars)
 
 
-def extract_cookies_from_browser(browser_name, profile=None, logger=YDLLogger(), *, keyring=None):
+def extract_cookies_from_browser(browser_name, profile=None, logger=YDLLogger(), *, keyring=None, container=None):
     if browser_name == 'firefox':
-        return _extract_firefox_cookies(profile, logger)
+        return _extract_firefox_cookies(profile, container, logger)
     elif browser_name == 'safari':
         return _extract_safari_cookies(profile, logger)
     elif browser_name in CHROMIUM_BASED_BROWSERS:
@@ -112,7 +122,7 @@ def extract_cookies_from_browser(browser_name, profile=None, logger=YDLLogger(),
     raise ValueError(f'unknown browser: {browser_name}')
 
 
-def _extract_firefox_cookies(profile, logger):
+def _extract_firefox_cookies(profile, container, logger):
     logger.info('Extracting cookies from firefox')
     if not sqlite3:
         logger.warning('Cannot extract cookies from firefox without sqlite3 support. '
@@ -131,11 +141,36 @@ def _extract_firefox_cookies(profile, logger):
         raise FileNotFoundError(f'could not find firefox cookies database in {search_root}')
     logger.debug(f'Extracting cookies from: "{cookie_database_path}"')
 
+    container_id = None
+    if container not in (None, 'none'):
+        containers_path = os.path.join(os.path.dirname(cookie_database_path), 'containers.json')
+        if not os.path.isfile(containers_path) or not os.access(containers_path, os.R_OK):
+            raise FileNotFoundError(f'could not read containers.json in {search_root}')
+        with open(containers_path) as containers:
+            identities = json.load(containers).get('identities', [])
+        container_id = next((context.get('userContextId') for context in identities if container in (
+            context.get('name'),
+            try_call(lambda: re.fullmatch(r'userContext([^\.]+)\.label', context['l10nID']).group())
+        )), None)
+        if not isinstance(container_id, int):
+            raise ValueError(f'could not find firefox container "{container}" in containers.json')
+
     with tempfile.TemporaryDirectory(prefix='yt_dlp') as tmpdir:
         cursor = None
         try:
             cursor = _open_database_copy(cookie_database_path, tmpdir)
-            cursor.execute('SELECT host, name, value, path, expiry, isSecure FROM moz_cookies')
+            if isinstance(container_id, int):
+                logger.debug(
+                    f'Only loading cookies from firefox container "{container}", ID {container_id}')
+                cursor.execute(
+                    'SELECT host, name, value, path, expiry, isSecure FROM moz_cookies WHERE originAttributes LIKE ? OR originAttributes LIKE ?',
+                    (f'%userContextId={container_id}', f'%userContextId={container_id}&%'))
+            elif container == 'none':
+                logger.debug('Only loading cookies not belonging to any container')
+                cursor.execute(
+                    'SELECT host, name, value, path, expiry, isSecure FROM moz_cookies WHERE NOT INSTR(originAttributes,"userContextId=")')
+            else:
+                cursor.execute('SELECT host, name, value, path, expiry, isSecure FROM moz_cookies')
             jar = YoutubeDLCookieJar()
             with _create_progress_bar(logger) as progress_bar:
                 table = cursor.fetchall()
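The container lookup reads containers.json next to the cookie database and maps a container's human-readable name to its numeric userContextId, which is then matched against the originAttributes column (built-in containers are additionally matched via their l10nID). A standalone sketch of the name lookup with an illustrative file body:

import json

# Illustrative containers.json content; the real file lives in the
# firefox profile directory next to cookies.sqlite
containers_json = '{"identities": [{"userContextId": 4, "name": "Work"}]}'


def find_container_id(container):
    identities = json.loads(containers_json).get('identities', [])
    return next((context.get('userContextId')
                 for context in identities if context.get('name') == container), None)


assert find_container_id('Work') == 4
# cookies.py then filters rows roughly like:
#   ... WHERE originAttributes LIKE '%userContextId=4'
#       OR originAttributes LIKE '%userContextId=4&%'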
@@ -810,12 +845,15 @@ def _get_linux_keyring_password(browser_keyring_name, keyring, logger):
 def _get_mac_keyring_password(browser_keyring_name, logger):
     logger.debug('using find-generic-password to obtain password from OSX keychain')
     try:
-        stdout, _, _ = Popen.run(
+        stdout, _, returncode = Popen.run(
             ['security', 'find-generic-password',
              '-w',  # write password to stdout
              '-a', browser_keyring_name,  # match 'account'
              '-s', f'{browser_keyring_name} Safe Storage'],  # match 'service'
             stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
+        if returncode:
+            logger.warning('find-generic-password failed')
+            return None
         return stdout.rstrip(b'\n')
     except Exception as e:
         logger.warning(f'exception running find-generic-password: {error_to_str(e)}')
@@ -948,11 +986,102 @@ def _is_path(value):
     return os.path.sep in value
 
 
-def _parse_browser_specification(browser_name, profile=None, keyring=None):
+def _parse_browser_specification(browser_name, profile=None, keyring=None, container=None):
     if browser_name not in SUPPORTED_BROWSERS:
         raise ValueError(f'unsupported browser: "{browser_name}"')
     if keyring not in (None, *SUPPORTED_KEYRINGS):
         raise ValueError(f'unsupported keyring: "{keyring}"')
-    if profile is not None and _is_path(profile):
-        profile = os.path.expanduser(profile)
-    return browser_name, profile, keyring
+    if profile is not None and _is_path(expand_path(profile)):
+        profile = expand_path(profile)
+    return browser_name, profile, keyring, container
+
+
+class LenientSimpleCookie(http.cookies.SimpleCookie):
+    """More lenient version of http.cookies.SimpleCookie"""
+    # From https://github.com/python/cpython/blob/v3.10.7/Lib/http/cookies.py
+    # We use Morsel's legal key chars to avoid errors on setting values
+    _LEGAL_KEY_CHARS = r'\w\d' + re.escape('!#$%&\'*+-.:^_`|~')
+    _LEGAL_VALUE_CHARS = _LEGAL_KEY_CHARS + re.escape('(),/<=>?@[]{}')
+
+    _RESERVED = {
+        "expires",
+        "path",
+        "comment",
+        "domain",
+        "max-age",
+        "secure",
+        "httponly",
+        "version",
+        "samesite",
+    }
+
+    _FLAGS = {"secure", "httponly"}
+
+    # Added 'bad' group to catch the remaining value
+    _COOKIE_PATTERN = re.compile(r"""
+        \s*                            # Optional whitespace at start of cookie
+        (?P<key>                       # Start of group 'key'
+        [""" + _LEGAL_KEY_CHARS + r"""]+?# Any word of at least one letter
+        )                              # End of group 'key'
+        (                              # Optional group: there may not be a value.
+        \s*=\s*                        # Equal Sign
+        (                              # Start of potential value
+        (?P<val>                       # Start of group 'val'
+        "(?:[^\\"]|\\.)*"              # Any doublequoted string
+        |                              # or
+        \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT  # Special case for "expires" attr
+        |                              # or
+        [""" + _LEGAL_VALUE_CHARS + r"""]*  # Any word or empty string
+        )                              # End of group 'val'
+        |                              # or
+        (?P<bad>(?:\\;|[^;])*?)        # 'bad' group fallback for invalid values
+        )                              # End of potential value
+        )?                             # End of optional value group
+        \s*                            # Any number of spaces.
+        (\s+|;|$)                      # Ending either at space, semicolon, or EOS.
+        """, re.ASCII | re.VERBOSE)
+
+    def load(self, data):
+        # Workaround for https://github.com/yt-dlp/yt-dlp/issues/4776
+        if not isinstance(data, str):
+            return super().load(data)
+
+        morsel = None
+        for match in self._COOKIE_PATTERN.finditer(data):
+            if match.group('bad'):
+                morsel = None
+                continue
+
+            key, value = match.group('key', 'val')
+
+            is_attribute = False
+            if key.startswith('$'):
+                key = key[1:]
+                is_attribute = True
+
+            lower_key = key.lower()
+            if lower_key in self._RESERVED:
+                if morsel is None:
+                    continue
+
+                if value is None:
+                    if lower_key not in self._FLAGS:
+                        morsel = None
+                        continue
+                    value = True
+                else:
+                    value, _ = self.value_decode(value)
+
+                morsel[key] = value
+
+            elif is_attribute:
+                morsel = None
+
+            elif value is not None:
+                morsel = self.get(key, http.cookies.Morsel())
+                real_value, coded_value = self.value_decode(value)
+                morsel.set(key, real_value, coded_value)
+                self[key] = morsel
+
+            else:
+                morsel = None
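The point of the 'bad' group: CPython's SimpleCookie parses all-or-nothing, so a single malformed pair discards the entire header, while LenientSimpleCookie only invalidates the morsel it belongs to. A quick comparison (the input is illustrative; the stdlib result assumes CPython 3.8+ behaviour):

from http.cookies import SimpleCookie
from yt_dlp.cookies import LenientSimpleCookie

header = 'first=ok; this-is-not-a-cookie; second=also-ok'
print(sorted(SimpleCookie(header)))         # [] -- stdlib drops everything
print(sorted(LenientSimpleCookie(header)))  # ['first', 'second']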
yt_dlp/dependencies/Cryptodome.py (new file, 36 lines)
@@ -0,0 +1,36 @@
+import types
+
+try:
+    import Cryptodome as _parent
+except ImportError:
+    try:
+        import Crypto as _parent
+    except (ImportError, SyntaxError):  # Old Crypto gives SyntaxError in newer Python
+        _parent = types.ModuleType('no_Cryptodome')
+        __bool__ = lambda: False
+
+__version__ = ''
+AES = PKCS1_v1_5 = Blowfish = PKCS1_OAEP = SHA1 = CMAC = RSA = None
+try:
+    if _parent.__name__ == 'Cryptodome':
+        from Cryptodome import __version__
+        from Cryptodome.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5
+        from Cryptodome.Hash import CMAC, SHA1
+        from Cryptodome.PublicKey import RSA
+    elif _parent.__name__ == 'Crypto':
+        from Crypto import __version__
+        from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
+        from Crypto.Hash import CMAC, SHA1  # noqa: F401
+        from Crypto.PublicKey import RSA  # noqa: F401
+except ImportError:
+    __version__ = f'broken {__version__}'.strip()
+
+
+_yt_dlp__identifier = _parent.__name__
+if AES and _yt_dlp__identifier == 'Crypto':
+    try:
+        # In pycrypto, mode defaults to ECB. See:
+        # https://www.pycryptodome.org/en/latest/src/vs_pycrypto.html#:~:text=not%20have%20ECB%20as%20default%20mode
+        AES.new(b'abcdefghijklmnop')
+    except TypeError:
+        _yt_dlp__identifier = 'pycrypto'
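Call sites now feature-test a single attribute on the wrapper module instead of importing `Cryptodome_AES` directly (compare the aes.py hunk earlier in this diff). A hedged usage sketch:

from yt_dlp.dependencies import Cryptodome

if Cryptodome.AES:
    ct = Cryptodome.AES.new(b'0' * 16, Cryptodome.AES.MODE_CBC, b'1' * 16).encrypt(b'x' * 16)
    print(len(ct))  # 16
else:
    print(f'pycryptodome(x) unavailable, version info: {Cryptodome.__version__!r}')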
yt_dlp/dependencies/__init__.py
@@ -23,24 +23,6 @@
     certifi = None
 
 
-try:
-    from Cryptodome.Cipher import AES as Cryptodome_AES
-except ImportError:
-    try:
-        from Crypto.Cipher import AES as Cryptodome_AES
-    except (ImportError, SyntaxError):  # Old Crypto gives SyntaxError in newer Python
-        Cryptodome_AES = None
-    else:
-        try:
-            # In pycrypto, mode defaults to ECB. See:
-            # https://www.pycryptodome.org/en/latest/src/vs_pycrypto.html#:~:text=not%20have%20ECB%20as%20default%20mode
-            Cryptodome_AES.new(b'abcdefghijklmnop')
-        except TypeError:
-            pass
-        else:
-            Cryptodome_AES._yt_dlp__identifier = 'pycrypto'
-
-
 try:
     import mutagen
 except ImportError:
@@ -84,12 +66,16 @@
     xattr._yt_dlp__identifier = 'pyxattr'
 
 
+from . import Cryptodome
+
 all_dependencies = {k: v for k, v in globals().items() if not k.startswith('_')}
-
-
 available_dependencies = {k: v for k, v in all_dependencies.items() if v}
 
 
+# Deprecated
+Cryptodome_AES = Cryptodome.AES
+
+
 __all__ = [
     'all_dependencies',
     'available_dependencies',
yt_dlp/downloader/common.py
@@ -15,15 +15,17 @@
 from ..utils import (
     IDENTITY,
     NO_DEFAULT,
-    NUMBER_RE,
     LockingUnsupportedError,
     Namespace,
     RetryManager,
     classproperty,
     decodeArgument,
+    deprecation_warning,
     encodeFilename,
     format_bytes,
     join_nonempty,
+    parse_bytes,
+    remove_start,
     sanitize_open,
     shell_quote,
     timeconvert,
@@ -92,6 +94,7 @@ def _set_ydl(self, ydl):
 
         for func in (
             'deprecation_warning',
+            'deprecated_feature',
             'report_error',
             'report_file_already_downloaded',
             'report_warning',
@@ -119,11 +122,11 @@ def format_seconds(seconds):
         time = timetuple_from_msec(seconds * 1000)
         if time.hours > 99:
             return '--:--:--'
-        if not time.hours:
-            return '%02d:%02d' % time[1:-1]
         return '%02d:%02d:%02d' % time[:-1]
 
-    format_eta = format_seconds
+    @classmethod
+    def format_eta(cls, seconds):
+        return f'{remove_start(cls.format_seconds(seconds), "00:"):>8s}'
 
     @staticmethod
     def calc_percent(byte_counter, data_len):
@@ -178,12 +181,9 @@ def best_block_size(elapsed_time, bytes):
     @staticmethod
     def parse_bytes(bytestr):
         """Parse a string indicating a byte quantity into an integer."""
-        matchobj = re.match(rf'(?i)^({NUMBER_RE})([kMGTPEZY]?)$', bytestr)
-        if matchobj is None:
-            return None
-        number = float(matchobj.group(1))
-        multiplier = 1024.0 ** 'bkmgtpezy'.index(matchobj.group(2).lower())
-        return int(round(number * multiplier))
+        deprecation_warning('yt_dlp.FileDownloader.parse_bytes is deprecated and '
+                            'may be removed in the future. Use yt_dlp.utils.parse_bytes instead')
+        return parse_bytes(bytestr)
 
     def slow_down(self, start_time, now, byte_counter):
         """Sleep if the download speed is over the rate limit."""
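`FileDownloader.parse_bytes` is now only a deprecation shim over `yt_dlp.utils.parse_bytes`; the semantics are unchanged: a float followed by an optional single-letter 1024-based suffix, or None on anything else. A short demo of the util it delegates to:

from yt_dlp.utils import parse_bytes

print(parse_bytes('500'))    # 500
print(parse_bytes('4.2M'))   # 4404019 == int(round(4.2 * 1024 ** 2))
print(parse_bytes('1G'))     # 1073741824
print(parse_bytes('bogus'))  # None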
@@ -331,6 +331,8 @@ def with_fields(*tups, default=''):
|
|||||||
return tmpl
|
return tmpl
|
||||||
return default
|
return default
|
||||||
|
|
||||||
|
_format_bytes = lambda k: f'{format_bytes(s.get(k)):>10s}'
|
||||||
|
|
||||||
if s['status'] == 'finished':
|
if s['status'] == 'finished':
|
||||||
if self.params.get('noprogress'):
|
if self.params.get('noprogress'):
|
||||||
self.to_screen('[download] Download completed')
|
self.to_screen('[download] Download completed')
|
||||||
@@ -338,7 +340,7 @@ def with_fields(*tups, default=''):
|
|||||||
s.update({
|
s.update({
|
||||||
'speed': speed,
|
'speed': speed,
|
||||||
'_speed_str': self.format_speed(speed).strip(),
|
'_speed_str': self.format_speed(speed).strip(),
|
||||||
'_total_bytes_str': format_bytes(s.get('total_bytes')),
|
'_total_bytes_str': _format_bytes('total_bytes'),
|
||||||
'_elapsed_str': self.format_seconds(s.get('elapsed')),
|
'_elapsed_str': self.format_seconds(s.get('elapsed')),
|
||||||
'_percent_str': self.format_percent(100),
|
'_percent_str': self.format_percent(100),
|
||||||
})
|
})
|
||||||
@@ -353,15 +355,15 @@ def with_fields(*tups, default=''):
|
|||||||
return
|
return
|
||||||
|
|
||||||
s.update({
|
s.update({
|
||||||
'_eta_str': self.format_eta(s.get('eta')),
|
'_eta_str': self.format_eta(s.get('eta')).strip(),
|
||||||
'_speed_str': self.format_speed(s.get('speed')),
|
'_speed_str': self.format_speed(s.get('speed')),
|
||||||
'_percent_str': self.format_percent(try_call(
|
'_percent_str': self.format_percent(try_call(
|
||||||
lambda: 100 * s['downloaded_bytes'] / s['total_bytes'],
|
lambda: 100 * s['downloaded_bytes'] / s['total_bytes'],
|
||||||
lambda: 100 * s['downloaded_bytes'] / s['total_bytes_estimate'],
|
lambda: 100 * s['downloaded_bytes'] / s['total_bytes_estimate'],
|
||||||
lambda: s['downloaded_bytes'] == 0 and 0)),
|
lambda: s['downloaded_bytes'] == 0 and 0)),
|
||||||
'_total_bytes_str': format_bytes(s.get('total_bytes')),
|
'_total_bytes_str': _format_bytes('total_bytes'),
|
||||||
'_total_bytes_estimate_str': format_bytes(s.get('total_bytes_estimate')),
|
'_total_bytes_estimate_str': _format_bytes('total_bytes_estimate'),
|
||||||
'_downloaded_bytes_str': format_bytes(s.get('downloaded_bytes')),
|
'_downloaded_bytes_str': _format_bytes('downloaded_bytes'),
|
||||||
'_elapsed_str': self.format_seconds(s.get('elapsed')),
|
'_elapsed_str': self.format_seconds(s.get('elapsed')),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
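The hunk above turns FileDownloader.parse_bytes into a deprecated shim over yt_dlp.utils.parse_bytes. A minimal sketch of the replacement call path (the example values are illustrative, not from the diff):

    from yt_dlp.utils import parse_bytes

    parse_bytes('500k')     # 512000 -- suffixes scale by powers of 1024 ('bkmgtpezy')
    parse_bytes('2.5M')     # 2621440
    parse_bytes('invalid')  # None -- unparsable strings yield None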
--- a/yt_dlp/downloader/dash.py
+++ b/yt_dlp/downloader/dash.py
@@ -1,8 +1,9 @@
 import time
+import urllib.parse
 
 from . import get_suitable_downloader
 from .fragment import FragmentFD
-from ..utils import urljoin
+from ..utils import update_url_query, urljoin
 
 
 class DashSegmentsFD(FragmentFD):
@@ -40,7 +41,12 @@ def real_download(self, filename, info_dict):
             self._prepare_and_start_frag_download(ctx, fmt)
             ctx['start'] = real_start
 
-            fragments_to_download = self._get_fragments(fmt, ctx)
+            extra_query = None
+            extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url')
+            if extra_param_to_segment_url:
+                extra_query = urllib.parse.parse_qs(extra_param_to_segment_url)
+
+            fragments_to_download = self._get_fragments(fmt, ctx, extra_query)
 
             if real_downloader:
                 self.to_screen(
@@ -51,13 +57,13 @@ def real_download(self, filename, info_dict):
 
             args.append([ctx, fragments_to_download, fmt])
 
-        return self.download_and_append_fragments_multiple(*args)
+        return self.download_and_append_fragments_multiple(*args, is_fatal=lambda idx: idx == 0)
 
     def _resolve_fragments(self, fragments, ctx):
         fragments = fragments(ctx) if callable(fragments) else fragments
         return [next(iter(fragments))] if self.params.get('test') else fragments
 
-    def _get_fragments(self, fmt, ctx):
+    def _get_fragments(self, fmt, ctx, extra_query):
         fragment_base_url = fmt.get('fragment_base_url')
         fragments = self._resolve_fragments(fmt['fragments'], ctx)
 
@@ -70,6 +76,8 @@ def _get_fragments(self, fmt, ctx):
             if not fragment_url:
                 assert fragment_base_url
                 fragment_url = urljoin(fragment_base_url, fragment['path'])
+            if extra_query:
+                fragment_url = update_url_query(fragment_url, extra_query)
 
             yield {
                 'frag_index': frag_index,
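For context on the dash.py change above: extra_param_to_segment_url is a raw query string that now also gets appended to every DASH fragment URL, not just HLS. A minimal sketch of the plumbing, assuming an illustrative URL:

    import urllib.parse
    from yt_dlp.utils import update_url_query

    extra_query = urllib.parse.parse_qs('token=abc&expiry=123')
    # {'token': ['abc'], 'expiry': ['123']}
    update_url_query('https://example.com/seg-1.m4s', extra_query)
    # 'https://example.com/seg-1.m4s?token=abc&expiry=123'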
--- a/yt_dlp/downloader/external.py
+++ b/yt_dlp/downloader/external.py
@@ -1,9 +1,11 @@
 import enum
+import json
 import os.path
 import re
 import subprocess
 import sys
 import time
+import uuid
 
 from .fragment import FragmentFD
 from ..compat import functools
@@ -20,8 +22,10 @@
     determine_ext,
     encodeArgument,
     encodeFilename,
+    find_available_port,
     handle_youtubedl_headers,
     remove_end,
+    sanitized_Request,
     traverse_obj,
 )
 
@@ -60,7 +64,6 @@ def real_download(self, filename, info_dict):
         }
         if filename != '-':
             fsize = os.path.getsize(encodeFilename(tmpfilename))
-            self.to_screen(f'\r[{self.get_basename()}] Downloaded {fsize} bytes')
             self.try_rename(tmpfilename, filename)
             status.update({
                 'downloaded_bytes': fsize,
@@ -101,6 +104,7 @@ def supports(cls, info_dict):
         return all((
             not info_dict.get('to_stdout') or Features.TO_STDOUT in cls.SUPPORTED_FEATURES,
             '+' not in info_dict['protocol'] or Features.MULTIPLE_FORMATS in cls.SUPPORTED_FEATURES,
+            not traverse_obj(info_dict, ('hls_aes', ...), 'extra_param_to_segment_url'),
             all(proto in cls.SUPPORTED_PROTOCOLS for proto in info_dict['protocol'].split('+')),
         ))
 
@@ -129,8 +133,7 @@ def _call_downloader(self, tmpfilename, info_dict):
         self._debug_cmd(cmd)
 
         if 'fragments' not in info_dict:
-            _, stderr, returncode = Popen.run(
-                cmd, text=True, stderr=subprocess.PIPE if self._CAPTURE_STDERR else None)
+            _, stderr, returncode = self._call_process(cmd, info_dict)
             if returncode and stderr:
                 self.to_stderr(stderr)
             return returncode
@@ -140,7 +143,7 @@ def _call_downloader(self, tmpfilename, info_dict):
         retry_manager = RetryManager(self.params.get('fragment_retries'), self.report_retry,
                                      frag_index=None, fatal=not skip_unavailable_fragments)
         for retry in retry_manager:
-            _, stderr, returncode = Popen.run(cmd, text=True, stderr=subprocess.PIPE)
+            _, stderr, returncode = self._call_process(cmd, info_dict)
             if not returncode:
                 break
             # TODO: Decide whether to retry based on error code
@@ -172,6 +175,9 @@ def _call_downloader(self, tmpfilename, info_dict):
             self.try_remove(encodeFilename('%s.frag.urls' % tmpfilename))
         return 0
 
+    def _call_process(self, cmd, info_dict):
+        return Popen.run(cmd, text=True, stderr=subprocess.PIPE)
+
 
 class CurlFD(ExternalFD):
     AVAILABLE_OPT = '-V'
@@ -252,6 +258,19 @@ def supports_manifest(manifest):
         check_results = (not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES)
         return all(check_results)
 
+    @staticmethod
+    def _aria2c_filename(fn):
+        return fn if os.path.isabs(fn) else f'.{os.path.sep}{fn}'
+
+    def _call_downloader(self, tmpfilename, info_dict):
+        # FIXME: Disabled due to https://github.com/yt-dlp/yt-dlp/issues/5931
+        if False and 'no-external-downloader-progress' not in self.params.get('compat_opts', []):
+            info_dict['__rpc'] = {
+                'port': find_available_port() or 19190,
+                'secret': str(uuid.uuid4()),
+            }
+        return super()._call_downloader(tmpfilename, info_dict)
+
     def _make_cmd(self, tmpfilename, info_dict):
         cmd = [self.exe, '-c',
                '--console-log-level=warn', '--summary-interval=0', '--download-result=hide',
@@ -272,6 +291,12 @@ def _make_cmd(self, tmpfilename, info_dict):
         cmd += self._bool_option('--show-console-readout', 'noprogress', 'false', 'true', '=')
         cmd += self._configuration_args()
 
+        if '__rpc' in info_dict:
+            cmd += [
+                '--enable-rpc',
+                f'--rpc-listen-port={info_dict["__rpc"]["port"]}',
+                f'--rpc-secret={info_dict["__rpc"]["secret"]}']
+
         # aria2c strips out spaces from the beginning/end of filenames and paths.
         # We work around this issue by adding a "./" to the beginning of the
         # filename and relative path, and adding a "/" at the end of the path.
@@ -280,11 +305,9 @@ def _make_cmd(self, tmpfilename, info_dict):
         # https://github.com/aria2/aria2/issues/1373
         dn = os.path.dirname(tmpfilename)
         if dn:
-            if not os.path.isabs(dn):
-                dn = f'.{os.path.sep}{dn}'
-            cmd += ['--dir', dn + os.path.sep]
+            cmd += ['--dir', self._aria2c_filename(dn) + os.path.sep]
         if 'fragments' not in info_dict:
-            cmd += ['--out', f'.{os.path.sep}{os.path.basename(tmpfilename)}']
+            cmd += ['--out', self._aria2c_filename(os.path.basename(tmpfilename))]
         cmd += ['--auto-file-renaming=false']
 
         if 'fragments' in info_dict:
@@ -293,15 +316,97 @@ def _make_cmd(self, tmpfilename, info_dict):
             url_list = []
             for frag_index, fragment in enumerate(info_dict['fragments']):
                 fragment_filename = '%s-Frag%d' % (os.path.basename(tmpfilename), frag_index)
-                url_list.append('%s\n\tout=%s' % (fragment['url'], fragment_filename))
+                url_list.append('%s\n\tout=%s' % (fragment['url'], self._aria2c_filename(fragment_filename)))
             stream, _ = self.sanitize_open(url_list_file, 'wb')
             stream.write('\n'.join(url_list).encode())
             stream.close()
-            cmd += ['-i', url_list_file]
+            cmd += ['-i', self._aria2c_filename(url_list_file)]
         else:
             cmd += ['--', info_dict['url']]
         return cmd
 
+    def aria2c_rpc(self, rpc_port, rpc_secret, method, params=()):
+        # Does not actually need to be UUID, just unique
+        sanitycheck = str(uuid.uuid4())
+        d = json.dumps({
+            'jsonrpc': '2.0',
+            'id': sanitycheck,
+            'method': method,
+            'params': [f'token:{rpc_secret}', *params],
+        }).encode('utf-8')
+        request = sanitized_Request(
+            f'http://localhost:{rpc_port}/jsonrpc',
+            data=d, headers={
+                'Content-Type': 'application/json',
+                'Content-Length': f'{len(d)}',
+                'Ytdl-request-proxy': '__noproxy__',
+            })
+        with self.ydl.urlopen(request) as r:
+            resp = json.load(r)
+        assert resp.get('id') == sanitycheck, 'Something went wrong with RPC server'
+        return resp['result']
+
+    def _call_process(self, cmd, info_dict):
+        if '__rpc' not in info_dict:
+            return super()._call_process(cmd, info_dict)
+
+        send_rpc = functools.partial(self.aria2c_rpc, info_dict['__rpc']['port'], info_dict['__rpc']['secret'])
+        started = time.time()
+
+        fragmented = 'fragments' in info_dict
+        frag_count = len(info_dict['fragments']) if fragmented else 1
+        status = {
+            'filename': info_dict.get('_filename'),
+            'status': 'downloading',
+            'elapsed': 0,
+            'downloaded_bytes': 0,
+            'fragment_count': frag_count if fragmented else None,
+            'fragment_index': 0 if fragmented else None,
+        }
+        self._hook_progress(status, info_dict)
+
+        def get_stat(key, *obj, average=False):
+            val = tuple(filter(None, map(float, traverse_obj(obj, (..., ..., key))))) or [0]
+            return sum(val) / (len(val) if average else 1)
+
+        with Popen(cmd, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE) as p:
+            # Add a small sleep so that RPC client can receive response,
+            # or the connection stalls infinitely
+            time.sleep(0.2)
+            retval = p.poll()
+            while retval is None:
+                # We don't use tellStatus as we won't know the GID without reading stdout
+                # Ref: https://aria2.github.io/manual/en/html/aria2c.html#aria2.tellActive
+                active = send_rpc('aria2.tellActive')
+                completed = send_rpc('aria2.tellStopped', [0, frag_count])
+
+                downloaded = get_stat('totalLength', completed) + get_stat('completedLength', active)
+                speed = get_stat('downloadSpeed', active)
+                total = frag_count * get_stat('totalLength', active, completed, average=True)
+                if total < downloaded:
+                    total = None
+
+                status.update({
+                    'downloaded_bytes': int(downloaded),
+                    'speed': speed,
+                    'total_bytes': None if fragmented else total,
+                    'total_bytes_estimate': total,
+                    'eta': (total - downloaded) / (speed or 1),
+                    'fragment_index': min(frag_count, len(completed) + 1) if fragmented else None,
+                    'elapsed': time.time() - started
+                })
+                self._hook_progress(status, info_dict)
+
+                if not active and len(completed) >= frag_count:
+                    send_rpc('aria2.shutdown')
+                    retval = p.wait()
+                    break
+
+                time.sleep(0.1)
+                retval = p.poll()
+
+            return '', p.stderr.read(), retval
+
 
 class HttpieFD(ExternalFD):
     AVAILABLE_OPT = '--version'
@@ -340,7 +445,6 @@ def can_merge_formats(cls, info_dict, params):
             and cls.can_download(info_dict))
 
     def _call_downloader(self, tmpfilename, info_dict):
-        urls = [f['url'] for f in info_dict.get('requested_formats', [])] or [info_dict['url']]
         ffpp = FFmpegPostProcessor(downloader=self)
         if not ffpp.available:
             self.report_error('m3u8 download detected but ffmpeg could not be found. Please install')
@@ -370,16 +474,6 @@ def _call_downloader(self, tmpfilename, info_dict):
             # http://trac.ffmpeg.org/ticket/6125#comment:10
             args += ['-seekable', '1' if seekable else '0']
 
-        http_headers = None
-        if info_dict.get('http_headers'):
-            youtubedl_headers = handle_youtubedl_headers(info_dict['http_headers'])
-            http_headers = [
-                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
-                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
-                '-headers',
-                ''.join(f'{key}: {val}\r\n' for key, val in youtubedl_headers.items())
-            ]
-
         env = None
         proxy = self.params.get('proxy')
         if proxy:
@@ -432,21 +526,26 @@ def _call_downloader(self, tmpfilename, info_dict):
 
         start_time, end_time = info_dict.get('section_start') or 0, info_dict.get('section_end')
 
-        for i, url in enumerate(urls):
-            if http_headers is not None and re.match(r'^https?://', url):
-                args += http_headers
+        selected_formats = info_dict.get('requested_formats') or [info_dict]
+        for i, fmt in enumerate(selected_formats):
+            if fmt.get('http_headers') and re.match(r'^https?://', fmt['url']):
+                headers_dict = handle_youtubedl_headers(fmt['http_headers'])
+                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
+                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
+                args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in headers_dict.items())])
+
             if start_time:
                 args += ['-ss', str(start_time)]
             if end_time:
                 args += ['-t', str(end_time - start_time)]
 
-            args += self._configuration_args((f'_i{i + 1}', '_i')) + ['-i', url]
+            args += self._configuration_args((f'_i{i + 1}', '_i')) + ['-i', fmt['url']]
 
         if not (start_time or end_time) or not self.params.get('force_keyframes_at_cuts'):
             args += ['-c', 'copy']
 
         if info_dict.get('requested_formats') or protocol == 'http_dash_segments':
-            for (i, fmt) in enumerate(info_dict.get('requested_formats') or [info_dict]):
+            for i, fmt in enumerate(selected_formats):
                 stream_number = fmt.get('manifest_stream_number', 0)
                 args.extend(['-map', f'{i}:{stream_number}'])
 
@@ -486,8 +585,9 @@ def _call_downloader(self, tmpfilename, info_dict):
         args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True))
         self._debug_cmd(args)
 
+        piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats)
         with Popen(args, stdin=subprocess.PIPE, env=env) as proc:
-            if url in ('-', 'pipe:'):
+            if piped:
                 self.on_process_started(proc, proc.stdin)
             try:
                 retval = proc.wait()
@@ -497,7 +597,7 @@ def _call_downloader(self, tmpfilename, info_dict):
                 # produces a file that is playable (this is mostly useful for live
                 # streams). Note that Windows is not affected and produces playable
                 # files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
-                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and url not in ('-', 'pipe:'):
+                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped:
                     proc.communicate_or_kill(b'q')
                 else:
                     proc.kill(timeout=None)
@@ -515,16 +615,14 @@ class AVconvFD(FFmpegFD):
     if name.endswith('FD') and name not in ('ExternalFD', 'FragmentFD')
 }
 
-_BY_EXE = {klass.EXE_NAME: klass for klass in _BY_NAME.values()}
-
 
 def list_external_downloaders():
     return sorted(_BY_NAME.keys())
 
 
 def get_external_downloader(external_downloader):
-    """ Given the name of the executable, see whether we support the given
-        downloader . """
-    # Drop .exe extension on Windows
+    """ Given the name of the executable, see whether we support the given downloader """
     bn = os.path.splitext(os.path.basename(external_downloader))[0]
-    return _BY_NAME.get(bn, _BY_EXE.get(bn))
+    return _BY_NAME.get(bn) or next((
+        klass for klass in _BY_NAME.values() if klass.EXE_NAME in bn
+    ), None)
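For context on the aria2c changes above: progress is polled over aria2c's JSON-RPC interface while the process runs (currently disabled via 'if False', see issue 5931). A standalone sketch of the request shape that aria2c_rpc builds -- the port and secret are illustrative, and plain urllib is used here instead of yt-dlp's sanitized_Request/ydl.urlopen:

    import json
    import urllib.request

    payload = json.dumps({
        'jsonrpc': '2.0',
        'id': 'sanity-check',               # echoed back; used to validate the response
        'method': 'aria2.tellActive',
        'params': ['token:my-rpc-secret'],  # aria2c expects the RPC secret as the first param
    }).encode()
    req = urllib.request.Request(
        'http://localhost:19190/jsonrpc', data=payload,
        headers={'Content-Type': 'application/json'})
    with urllib.request.urlopen(req) as r:
        print(json.load(r)['result'])       # active downloads with completedLength, downloadSpeed, ...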
--- a/yt_dlp/downloader/f4m.py
+++ b/yt_dlp/downloader/f4m.py
@@ -424,6 +424,4 @@ def real_download(self, filename, info_dict):
                     msg = 'Missed %d fragments' % (fragments_list[0][1] - (frag_i + 1))
                     self.report_warning(msg)
 
-        self._finish_frag_download(ctx, info_dict)
-
-        return True
+        return self._finish_frag_download(ctx, info_dict)
--- a/yt_dlp/downloader/fragment.py
+++ b/yt_dlp/downloader/fragment.py
@@ -65,8 +65,8 @@ class FragmentFD(FileDownloader):
     """
 
     def report_retry_fragment(self, err, frag_index, count, retries):
-        self.deprecation_warning(
-            'yt_dlp.downloader.FragmentFD.report_retry_fragment is deprecated. Use yt_dlp.downloader.FileDownloader.report_retry instead')
+        self.deprecation_warning('yt_dlp.downloader.FragmentFD.report_retry_fragment is deprecated. '
+                                 'Use yt_dlp.downloader.FileDownloader.report_retry instead')
         return self.report_retry(err, count, retries, frag_index)
 
     def report_skip_fragment(self, frag_index, err=None):
@@ -295,16 +295,23 @@ def _finish_frag_download(self, ctx, info_dict):
             self.try_remove(ytdl_filename)
         elapsed = time.time() - ctx['started']
 
-        if ctx['tmpfilename'] == '-':
-            downloaded_bytes = ctx['complete_frags_downloaded_bytes']
+        to_file = ctx['tmpfilename'] != '-'
+        if to_file:
+            downloaded_bytes = os.path.getsize(encodeFilename(ctx['tmpfilename']))
         else:
+            downloaded_bytes = ctx['complete_frags_downloaded_bytes']
+
+        if not downloaded_bytes:
+            if to_file:
+                self.try_remove(ctx['tmpfilename'])
+            self.report_error('The downloaded file is empty')
+            return False
+        elif to_file:
             self.try_rename(ctx['tmpfilename'], ctx['filename'])
-            if self.params.get('updatetime', True):
-                filetime = ctx.get('fragment_filetime')
-                if filetime:
-                    with contextlib.suppress(Exception):
-                        os.utime(ctx['filename'], (time.time(), filetime))
-            downloaded_bytes = os.path.getsize(encodeFilename(ctx['filename']))
+            filetime = ctx.get('fragment_filetime')
+            if self.params.get('updatetime', True) and filetime:
+                with contextlib.suppress(Exception):
+                    os.utime(ctx['filename'], (time.time(), filetime))
 
         self._hook_progress({
             'downloaded_bytes': downloaded_bytes,
@@ -316,6 +323,7 @@ def _finish_frag_download(self, ctx, info_dict):
             'max_progress': ctx.get('max_progress'),
             'progress_idx': ctx.get('progress_idx'),
         }, info_dict)
+        return True
 
     def _prepare_external_frag_download(self, ctx):
         if 'live' not in ctx:
@@ -352,7 +360,8 @@ def decrypt_fragment(fragment, frag_content):
             if not decrypt_info or decrypt_info['METHOD'] != 'AES-128':
                 return frag_content
             iv = decrypt_info.get('IV') or struct.pack('>8xq', fragment['media_sequence'])
-            decrypt_info['KEY'] = decrypt_info.get('KEY') or _get_key(info_dict.get('_decryption_key_url') or decrypt_info['URI'])
+            decrypt_info['KEY'] = (decrypt_info.get('KEY')
+                                   or _get_key(traverse_obj(info_dict, ('hls_aes', 'uri')) or decrypt_info['URI']))
             # Don't decrypt the content in tests since the data is explicitly truncated and it's not to a valid block
             # size (see https://github.com/ytdl-org/youtube-dl/pull/27660). Tests only care that the correct data downloaded,
             # not what it decrypts to.
@@ -362,7 +371,7 @@ def decrypt_fragment(fragment, frag_content):
 
         return decrypt_fragment
 
-    def download_and_append_fragments_multiple(self, *args, pack_func=None, finish_func=None):
+    def download_and_append_fragments_multiple(self, *args, **kwargs):
         '''
         @params (ctx1, fragments1, info_dict1), (ctx2, fragments2, info_dict2), ...
         all args must be either tuple or list
@@ -370,18 +379,17 @@ def download_and_append_fragments_multiple(self, *args, pack_func=None, finish_f
         interrupt_trigger = [True]
         max_progress = len(args)
         if max_progress == 1:
-            return self.download_and_append_fragments(*args[0], pack_func=pack_func, finish_func=finish_func)
+            return self.download_and_append_fragments(*args[0], **kwargs)
         max_workers = self.params.get('concurrent_fragment_downloads', 1)
         if max_progress > 1:
             self._prepare_multiline_status(max_progress)
-        is_live = any(traverse_obj(args, (..., 2, 'is_live'), default=[]))
+        is_live = any(traverse_obj(args, (..., 2, 'is_live')))
 
         def thread_func(idx, ctx, fragments, info_dict, tpe):
             ctx['max_progress'] = max_progress
             ctx['progress_idx'] = idx
             return self.download_and_append_fragments(
-                ctx, fragments, info_dict, pack_func=pack_func, finish_func=finish_func,
-                tpe=tpe, interrupt_trigger=interrupt_trigger)
+                ctx, fragments, info_dict, **kwargs, tpe=tpe, interrupt_trigger=interrupt_trigger)
 
         class FTPE(concurrent.futures.ThreadPoolExecutor):
             # has to stop this or it's going to wait on the worker thread itself
@@ -428,17 +436,12 @@ def interrupt_trigger_iter(fg):
         return result
 
     def download_and_append_fragments(
-            self, ctx, fragments, info_dict, *, pack_func=None, finish_func=None,
-            tpe=None, interrupt_trigger=None):
-        if not interrupt_trigger:
-            interrupt_trigger = (True, )
+            self, ctx, fragments, info_dict, *, is_fatal=(lambda idx: False),
+            pack_func=(lambda content, idx: content), finish_func=None,
+            tpe=None, interrupt_trigger=(True, )):
 
-        is_fatal = (
-            ((lambda _: False) if info_dict.get('is_live') else (lambda idx: idx == 0))
-            if self.params.get('skip_unavailable_fragments', True) else (lambda _: True))
-
-        if not pack_func:
-            pack_func = lambda frag_content, _: frag_content
+        if not self.params.get('skip_unavailable_fragments', True):
+            is_fatal = lambda _: True
 
         def download_fragment(fragment, ctx):
             if not interrupt_trigger[0]:
@@ -463,7 +466,8 @@ def error_callback(err, count, retries):
             for retry in RetryManager(self.params.get('fragment_retries'), error_callback):
                 try:
                     ctx['fragment_count'] = fragment.get('fragment_count')
-                    if not self._download_fragment(ctx, fragment['url'], info_dict, headers):
+                    if not self._download_fragment(
+                            ctx, fragment['url'], info_dict, headers, info_dict.get('request_data')):
                         return
                 except (urllib.error.HTTPError, http.client.IncompleteRead) as err:
                     retry.error = err
@@ -493,7 +497,7 @@ def _download_fragment(fragment):
                 download_fragment(fragment, ctx_copy)
                 return fragment, fragment['frag_index'], ctx_copy.get('fragment_filename_sanitized')
 
-            self.report_warning('The download speed shown is only of one thread. This is a known issue and patches are welcome')
+            self.report_warning('The download speed shown is only of one thread. This is a known issue')
             with tpe or concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
                 try:
                     for fragment, frag_index, frag_filename in pool.map(_download_fragment, fragments):
@@ -527,5 +531,4 @@ def _download_fragment(fragment):
         if finish_func is not None:
             ctx['dest_stream'].write(finish_func())
             ctx['dest_stream'].flush()
-        self._finish_frag_download(ctx, info_dict)
-        return True
+        return self._finish_frag_download(ctx, info_dict)
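Two behavioural notes on the fragment.py hunks above: _finish_frag_download now returns False (after removing the temp file) when nothing was downloaded, which is why the f4m/ism/mhtml/live-chat downloaders switch to 'return self._finish_frag_download(ctx, info_dict)'; and whether a failed fragment aborts the whole download is now caller-controlled through the is_fatal keyword. A sketch of that contract (indices are fragment positions; not part of the diff):

    is_fatal = lambda idx: idx == 0  # what DashSegmentsFD passes: only the first fragment is fatal

    [is_fatal(i) for i in range(3)]
    # [True, False, False] -- later fragments fall back to skip_unavailable_fragments handling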
--- a/yt_dlp/downloader/hls.py
+++ b/yt_dlp/downloader/hls.py
@@ -7,8 +7,15 @@
 from .external import FFmpegFD
 from .fragment import FragmentFD
 from .. import webvtt
-from ..dependencies import Cryptodome_AES
-from ..utils import bug_reports_message, parse_m3u8_attributes, update_url_query
+from ..dependencies import Cryptodome
+from ..utils import (
+    bug_reports_message,
+    parse_m3u8_attributes,
+    remove_start,
+    traverse_obj,
+    update_url_query,
+    urljoin,
+)
 
 
 class HlsFD(FragmentFD):
@@ -63,7 +70,7 @@ def real_download(self, filename, info_dict):
         can_download, message = self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')), None
         if can_download:
             has_ffmpeg = FFmpegFD.available()
-            no_crypto = not Cryptodome_AES and '#EXT-X-KEY:METHOD=AES-128' in s
+            no_crypto = not Cryptodome.AES and '#EXT-X-KEY:METHOD=AES-128' in s
             if no_crypto and has_ffmpeg:
                 can_download, message = False, 'The stream has AES-128 encryption and pycryptodomex is not available'
             elif no_crypto:
@@ -150,6 +157,13 @@ def is_ad_fragment_end(s):
         i = 0
         media_sequence = 0
         decrypt_info = {'METHOD': 'NONE'}
+        external_aes_key = traverse_obj(info_dict, ('hls_aes', 'key'))
+        if external_aes_key:
+            external_aes_key = binascii.unhexlify(remove_start(external_aes_key, '0x'))
+            assert len(external_aes_key) in (16, 24, 32), 'Invalid length for HLS AES-128 key'
+        external_aes_iv = traverse_obj(info_dict, ('hls_aes', 'iv'))
+        if external_aes_iv:
+            external_aes_iv = binascii.unhexlify(remove_start(external_aes_iv, '0x').zfill(32))
         byte_range = {}
         discontinuity_count = 0
         frag_index = 0
@@ -165,10 +179,7 @@ def is_ad_fragment_end(s):
                     frag_index += 1
                     if frag_index <= ctx['fragment_index']:
                         continue
-                    frag_url = (
-                        line
-                        if re.match(r'^https?://', line)
-                        else urllib.parse.urljoin(man_url, line))
+                    frag_url = urljoin(man_url, line)
                     if extra_query:
                         frag_url = update_url_query(frag_url, extra_query)
 
@@ -190,10 +201,7 @@ def is_ad_fragment_end(s):
                         return False
                     frag_index += 1
                     map_info = parse_m3u8_attributes(line[11:])
-                    frag_url = (
-                        map_info.get('URI')
-                        if re.match(r'^https?://', map_info.get('URI'))
-                        else urllib.parse.urljoin(man_url, map_info.get('URI')))
+                    frag_url = urljoin(man_url, map_info.get('URI'))
                     if extra_query:
                         frag_url = update_url_query(frag_url, extra_query)
 
@@ -218,15 +226,18 @@ def is_ad_fragment_end(s):
                     decrypt_url = decrypt_info.get('URI')
                     decrypt_info = parse_m3u8_attributes(line[11:])
                     if decrypt_info['METHOD'] == 'AES-128':
-                        if 'IV' in decrypt_info:
+                        if external_aes_iv:
+                            decrypt_info['IV'] = external_aes_iv
+                        elif 'IV' in decrypt_info:
                             decrypt_info['IV'] = binascii.unhexlify(decrypt_info['IV'][2:].zfill(32))
-                        if not re.match(r'^https?://', decrypt_info['URI']):
-                            decrypt_info['URI'] = urllib.parse.urljoin(
-                                man_url, decrypt_info['URI'])
-                        if extra_query:
-                            decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query)
-                        if decrypt_url != decrypt_info['URI']:
-                            decrypt_info['KEY'] = None
+                        if external_aes_key:
+                            decrypt_info['KEY'] = external_aes_key
+                        else:
+                            decrypt_info['URI'] = urljoin(man_url, decrypt_info['URI'])
+                            if extra_query:
+                                decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query)
+                            if decrypt_url != decrypt_info['URI']:
+                                decrypt_info['KEY'] = None
 
                 elif line.startswith('#EXT-X-MEDIA-SEQUENCE'):
                     media_sequence = int(line[22:])
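For context on the hls.py hunks above: hls_aes lets an extractor hand the AES-128 key/IV to the downloader directly; both are hex strings with an optional '0x' prefix. A sketch of the normalisation performed, assuming illustrative hex values:

    import binascii
    from yt_dlp.utils import remove_start

    key = binascii.unhexlify(remove_start('0x31323334353637383930313233343536', '0x'))
    len(key)  # 16 -- the assert allows 16-, 24- or 32-byte keys
    iv = binascii.unhexlify(remove_start('0x1', '0x').zfill(32))
    len(iv)   # 16 -- left-padded with zeros to a full 128-bit IV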
--- a/yt_dlp/downloader/http.py
+++ b/yt_dlp/downloader/http.py
@@ -211,7 +211,12 @@ def close_stream():
                 ctx.stream = None
 
         def download():
-            data_len = ctx.data.info().get('Content-length', None)
+            data_len = ctx.data.info().get('Content-length')
+
+            if ctx.data.info().get('Content-encoding'):
+                # Content-encoding is present, Content-length is not reliable anymore as we are
+                # doing auto decompression. (See: https://github.com/yt-dlp/yt-dlp/pull/6176)
+                data_len = None
 
             # Range HTTP header may be ignored/unsupported by a webserver
             # (e.g. extractor/scivee.py, extractor/bambuser.py).
--- a/yt_dlp/downloader/ism.py
+++ b/yt_dlp/downloader/ism.py
@@ -138,6 +138,8 @@ def write_piff_header(stream, params):
 
         if fourcc == 'AACL':
            sample_entry_box = box(b'mp4a', sample_entry_payload)
+        if fourcc == 'EC-3':
+            sample_entry_box = box(b'ec-3', sample_entry_payload)
     elif stream_type == 'video':
         sample_entry_payload += u16.pack(0)  # pre defined
         sample_entry_payload += u16.pack(0)  # reserved
@@ -278,5 +280,4 @@ def real_download(self, filename, info_dict):
                     return False
                 self.report_skip_fragment(frag_index)
 
-        self._finish_frag_download(ctx, info_dict)
-        return True
+        return self._finish_frag_download(ctx, info_dict)
--- a/yt_dlp/downloader/mhtml.py
+++ b/yt_dlp/downloader/mhtml.py
@@ -186,5 +186,4 @@ def real_download(self, filename, info_dict):
 
         ctx['dest_stream'].write(
             b'--%b--\r\n\r\n' % frag_boundary.encode('us-ascii'))
-        self._finish_frag_download(ctx, info_dict)
-        return True
+        return self._finish_frag_download(ctx, info_dict)
--- a/yt_dlp/downloader/youtube_live_chat.py
+++ b/yt_dlp/downloader/youtube_live_chat.py
@@ -191,8 +191,7 @@ def download_and_parse_fragment(url, frag_index, request_data=None, headers=None
             if test:
                 break
 
-        self._finish_frag_download(ctx, info_dict)
-        return True
+        return self._finish_frag_download(ctx, info_dict)
 
     @staticmethod
     def parse_live_timestamp(action):
--- a/yt_dlp/extractor/_extractors.py
+++ b/yt_dlp/extractor/_extractors.py
@@ -1,5 +1,30 @@
 # flake8: noqa: F401
 
+from .youtube import (  # Youtube is moved to the top to improve performance
+    YoutubeIE,
+    YoutubeClipIE,
+    YoutubeFavouritesIE,
+    YoutubeNotificationsIE,
+    YoutubeHistoryIE,
+    YoutubeTabIE,
+    YoutubeLivestreamEmbedIE,
+    YoutubePlaylistIE,
+    YoutubeRecommendedIE,
+    YoutubeSearchDateIE,
+    YoutubeSearchIE,
+    YoutubeSearchURLIE,
+    YoutubeMusicSearchURLIE,
+    YoutubeSubscriptionsIE,
+    YoutubeStoriesIE,
+    YoutubeTruncatedIDIE,
+    YoutubeTruncatedURLIE,
+    YoutubeYtBeIE,
+    YoutubeYtUserIE,
+    YoutubeWatchLaterIE,
+    YoutubeShortsAudioPivotIE,
+    YoutubeConsentRedirectIE,
+)
+
 from .abc import (
     ABCIE,
     ABCIViewIE,
@@ -41,12 +66,21 @@
     HistoryPlayerIE,
     BiographyIE,
 )
+from .aeonco import AeonCoIE
 from .afreecatv import (
     AfreecaTVIE,
     AfreecaTVLiveIE,
     AfreecaTVUserIE,
 )
+from .agora import (
+    TokFMAuditionIE,
+    TokFMPodcastIE,
+    WyborczaPodcastIE,
+    WyborczaVideoIE,
+)
 from .airmozilla import AirMozillaIE
+from .airtv import AirTVIE
+from .aitube import AitubeKZVideoIE
 from .aljazeera import AlJazeeraIE
 from .alphaporno import AlphaPornoIE
 from .amara import AmaraIE
@@ -55,13 +89,21 @@
     AluraCourseIE
 )
 from .amcnetworks import AMCNetworksIE
-from .amazon import AmazonStoreIE
+from .amazon import (
+    AmazonStoreIE,
+    AmazonReviewsIE,
+)
+from .amazonminitv import (
+    AmazonMiniTVIE,
+    AmazonMiniTVSeasonIE,
+    AmazonMiniTVSeriesIE,
+)
 from .americastestkitchen import (
     AmericasTestKitchenIE,
     AmericasTestKitchenSeasonIE,
 )
+from .anchorfm import AnchorFMEpisodeIE
 from .angel import AngelIE
-from .animeondemand import AnimeOnDemandIE
 from .anvato import AnvatoIE
 from .aol import AolIE
 from .allocine import AllocineIE
@@ -81,6 +123,7 @@
 from .archiveorg import (
     ArchiveOrgIE,
     YoutubeWebArchiveIE,
+    VLiveWebArchiveIE,
 )
 from .arcpublishing import ArcPublishingIE
 from .arkena import ArkenaIE
@@ -148,7 +191,12 @@
 from .beeg import BeegIE
 from .behindkink import BehindKinkIE
 from .bellmedia import BellMediaIE
+from .beatbump import (
+    BeatBumpVideoIE,
+    BeatBumpPlaylistIE,
+)
 from .beatport import BeatportIE
+from .berufetv import BerufeTVIE
 from .bet import BetIE
 from .bfi import BFIPlayerIE
 from .bfmtv import (
@@ -162,13 +210,16 @@
 from .bild import BildIE
 from .bilibili import (
     BiliBiliIE,
+    BiliBiliBangumiIE,
+    BiliBiliBangumiMediaIE,
     BiliBiliSearchIE,
     BilibiliCategoryIE,
-    BiliBiliBangumiIE,
     BilibiliAudioIE,
     BilibiliAudioAlbumIE,
     BiliBiliPlayerIE,
-    BilibiliChannelIE,
+    BilibiliSpaceVideoIE,
+    BilibiliSpaceAudioIE,
+    BilibiliSpacePlaylistIE,
     BiliIntlIE,
     BiliIntlSeriesIE,
     BiliLiveIE,
@@ -188,12 +239,15 @@
     BleacherReportIE,
     BleacherReportCMSIE,
 )
+from .blerp import BlerpIE
 from .blogger import BloggerIE
 from .bloomberg import BloombergIE
 from .bokecc import BokeCCIE
 from .bongacams import BongaCamsIE
 from .bostonglobe import BostonGlobeIE
 from .box import BoxIE
+from .boxcast import BoxCastVideoIE
+from .booyah import BooyahClipsIE
 from .bpb import BpbIE
 from .br import (
     BRIE,
@@ -207,6 +261,7 @@
     BrightcoveNewIE,
 )
 from .businessinsider import BusinessInsiderIE
+from .bundesliga import BundesligaIE
 from .buzzfeed import BuzzFeedIE
 from .byutv import BYUtvIE
 from .c56 import C56IE
@@ -219,6 +274,7 @@
     CamdemyFolderIE
 )
 from .cammodels import CamModelsIE
+from .camsoda import CamsodaIE
 from .camtasia import CamtasiaEmbedIE
 from .camwithher import CamWithHerIE
 from .canalalpha import CanalAlphaIE
@@ -282,6 +338,7 @@
 )
 from .cinchcast import CinchcastIE
 from .cinemax import CinemaxIE
+from .cinetecamilano import CinetecaMilanoIE
 from .ciscolive import (
     CiscoLiveSessionIE,
     CiscoLiveSearchIE,
@@ -306,6 +363,7 @@
     CNNIE,
     CNNBlogsIE,
     CNNArticleIE,
+    CNNIndonesiaIE,
 )
 from .coub import CoubIE
 from .comedycentral import (
@@ -335,8 +393,6 @@
     CrowdBunkerChannelIE,
 )
 from .crunchyroll import (
-    CrunchyrollIE,
-    CrunchyrollShowPlaylistIE,
     CrunchyrollBetaIE,
     CrunchyrollBetaShowIE,
 )
@@ -384,7 +440,7 @@
     DeezerAlbumIE,
 )
 from .democracynow import DemocracynowIE
-from .detik import Detik20IE
+from .detik import DetikEmbedIE
 from .dfb import DFBIE
 from .dhm import DHMIE
 from .digg import DiggIE
@@ -411,6 +467,7 @@
     AnimalPlanetIE,
     TLCIE,
     MotorTrendIE,
+    MotorTrendOnDemandIE,
     DiscoveryPlusIndiaIE,
     DiscoveryNetworksDeIE,
     DiscoveryPlusItalyIE,
@@ -423,6 +480,8 @@
 from .drtv import (
     DRTVIE,
     DRTVLiveIE,
+    DRTVSeasonIE,
+    DRTVSeriesIE,
 )
 from .dtube import DTubeIE
 from .dvtv import DVTVIE
@@ -432,11 +491,14 @@
 )
 from .dumpert import DumpertIE
 from .defense import DefenseGouvFrIE
+from .deuxm import (
+    DeuxMIE,
+    DeuxMNewsIE
+)
 from .digitalconcerthall import DigitalConcertHallIE
 from .discovery import DiscoveryIE
 from .disney import DisneyIE
 from .dispeak import DigitallySpeakingIE
-from .doodstream import DoodStreamIE
 from .dropbox import DropboxIE
 from .dropout import (
     DropoutSeasonIE,
@@ -448,6 +510,7 @@
 )
 from .eagleplatform import EaglePlatformIE, ClipYouEmbedIE
 from .ebaumsworld import EbaumsWorldIE
+from .ebay import EbayIE
 from .echomsk import EchoMskIE
 from .egghead import (
     EggheadCourseIE,
@@ -470,6 +533,7 @@
     EpiconIE,
     EpiconSeriesIE,
 )
+from .epoch import EpochIE
 from .eporner import EpornerIE
 from .eroprofile import (
     EroProfileIE,
@@ -489,8 +553,9 @@
     ESPNCricInfoIE,
 )
 from .esri import EsriVideoIE
-from .europa import EuropaIE
+from .europa import EuropaIE, EuroParlWebstreamIE
 from .europeantour import EuropeanTourIE
+from .eurosport import EurosportIE
 from .euscreen import EUScreenIE
 from .expotv import ExpoTVIE
 from .expressen import ExpressenIE
@@ -546,6 +611,7 @@
 from .foxnews import (
     FoxNewsIE,
     FoxNewsArticleIE,
+    FoxNewsVideoIE,
 )
 from .foxsports import FoxSportsIE
 from .fptplay import FptplayIE
@@ -596,6 +662,10 @@
 from .gdcvault import GDCVaultIE
 from .gedidigital import GediDigitalIE
 from .generic import GenericIE
+from .genius import (
+    GeniusIE,
+    GeniusLyricsIE,
+)
 from .gettr import (
     GettrIE,
     GettrStreamingIE,
@@ -623,6 +693,7 @@
 )
 from .googlesearch import GoogleSearchIE
 from .gopro import GoProIE
+from .goplay import GoPlayIE
 from .goshgay import GoshgayIE
 from .gotostage import GoToStageIE
 from .gputechconf import GPUTechConfIE
@@ -651,6 +722,7 @@
     HotStarIE,
     HotStarPrefixIE,
     HotStarPlaylistIE,
+    HotStarSeasonIE,
     HotStarSeriesIE,
 )
 from .howcast import HowcastIE
@@ -664,7 +736,10 @@
     HSEShowIE,
     HSEProductIE,
 )
-from .genericembeds import HTML5MediaEmbedIE
+from .genericembeds import (
+    HTML5MediaEmbedIE,
+    QuotedHTMLIE,
+)
 from .huajiao import HuajiaoIE
 from .huya import HuyaLiveIE
 from .huffpost import HuffPostIE
@@ -674,6 +749,7 @@
     HungamaAlbumPlaylistIE,
 )
 from .hypem import HypemIE
+from .hypergryph import MonsterSirenHypergryphMusicIE
 from .hytale import HytaleIE
 from .icareus import IcareusIE
 from .ichinanalive import (
@@ -689,6 +765,7 @@
     IHeartRadioIE,
     IHeartRadioPodcastIE,
 )
+from .iltalehti import IltalehtiIE
 from .imdb import (
     ImdbIE,
     ImdbListIE
@@ -720,6 +797,11 @@
     IqIE,
     IqAlbumIE
 )
+from .islamchannel import (
+    IslamChannelIE,
+    IslamChannelSeriesIE,
+)
+from .israelnationalnews import IsraelNationalNewsIE
 from .itprotv import (
     ITProTVIE,
     ITProTVCourseIE
@@ -748,12 +830,21 @@
     JamendoIE,
     JamendoAlbumIE,
 )
+from .japandiet import (
+    ShugiinItvLiveIE,
+    ShugiinItvLiveRoomIE,
+    ShugiinItvVodIE,
+    SangiinInstructionIE,
+    SangiinIE,
+)
 from .jeuxvideo import JeuxVideoIE
 from .jove import JoveIE
 from .joj import JojIE
 from .jwplatform import JWPlatformIE
 from .kakao import KakaoIE
 from .kaltura import KalturaIE
+from .kanal2 import Kanal2IE
+from .kankanews import KankaNewsIE
 from .karaoketv import KaraoketvIE
 from .karrierevideos import KarriereVideosIE
 from .keezmovies import KeezMoviesIE
@@ -763,10 +854,15 @@
     KhanAcademyIE,
     KhanAcademyUnitIE,
 )
+from .kick import (
+    KickIE,
+    KickVODIE,
+)
 from .kicker import KickerIE
 from .kickstarter import KickStarterIE
 from .kinja import KinjaEmbedIE
 from .kinopoisk import KinoPoiskIE
+from .kommunetv import KommunetvIE
 from .kompas import KompasVideoIE
 from .konserthusetplay import KonserthusetPlayIE
 from .koo import KooIE
@@ -847,6 +943,7 @@
 )
 from .linuxacademy import LinuxAcademyIE
 from .liputan6 import Liputan6IE
+from .listennotes import ListenNotesIE
 from .litv import LiTVIE
 from .livejournal import LiveJournalIE
 from .livestream import (
@@ -909,6 +1006,11 @@
     MediasiteCatalogIE,
     MediasiteNamedCatalogIE,
 )
+from .mediastream import (
+    MediaStreamIE,
+    WinSportsVideoIE,
+)
+from .mediaworksnz import MediaWorksNZVODIE
 from .medici import MediciIE
 from .megaphone import MegaphoneIE
 from .meipai import MeipaiIE
@@ -924,6 +1026,7 @@
     MicrosoftVirtualAcademyIE,
     MicrosoftVirtualAcademyCourseIE,
 )
+from .microsoftembed import MicrosoftEmbedIE
 from .mildom import (
     MildomIE,
     MildomVodIE,
@@ -958,6 +1061,7 @@
     MLBIE,
     MLBVideoIE,
     MLBTVIE,
+    MLBArticleIE,
 )
 from .mlssoccer import MLSSoccerIE
 from .mnet import MnetIE
@@ -1045,6 +1149,7 @@
     NBCSportsIE,
     NBCSportsStreamIE,
     NBCSportsVPlayerIE,
+    NBCStationsIE,
 )
 from .ndr import (
     NDRIE,
@@ -1073,12 +1178,14 @@
 from .netverse import (
     NetverseIE,
     NetversePlaylistIE,
+    NetverseSearchIE,
 )
 from .newgrounds import (
     NewgroundsIE,
     NewgroundsPlaylistIE,
     NewgroundsUserIE,
 )
+from .newspicks import NewsPicksIE
 from .newstube import NewstubeIE
 from .newsy import NewsyIE
 from .nextmedia import (
@@ -1096,6 +1203,8 @@
 from .nfl import (
     NFLIE,
     NFLArticleIE,
+    NFLPlusEpisodeIE,
+    NFLPlusReplayIE,
 )
 from .nhk import (
     NhkVodIE,
||||||
@@ -1133,11 +1242,13 @@
|
|||||||
from .nitter import NitterIE
|
from .nitter import NitterIE
|
||||||
from .njpwworld import NJPWWorldIE
|
from .njpwworld import NJPWWorldIE
|
||||||
from .nobelprize import NobelPrizeIE
|
from .nobelprize import NobelPrizeIE
|
||||||
|
from .noice import NoicePodcastIE
|
||||||
from .nonktube import NonkTubeIE
|
from .nonktube import NonkTubeIE
|
||||||
from .noodlemagazine import NoodleMagazineIE
|
from .noodlemagazine import NoodleMagazineIE
|
||||||
from .noovo import NoovoIE
|
from .noovo import NoovoIE
|
||||||
from .normalboots import NormalbootsIE
|
from .normalboots import NormalbootsIE
|
||||||
from .nosvideo import NosVideoIE
|
from .nosvideo import NosVideoIE
|
||||||
|
from .nosnl import NOSNLArticleIE
|
||||||
from .nova import (
|
from .nova import (
|
||||||
NovaEmbedIE,
|
NovaEmbedIE,
|
||||||
NovaIE,
|
NovaIE,
|
||||||
@@ -1184,14 +1295,22 @@
|
|||||||
)
|
)
|
||||||
from .nuvid import NuvidIE
|
from .nuvid import NuvidIE
|
||||||
from .nzherald import NZHeraldIE
|
from .nzherald import NZHeraldIE
|
||||||
|
from .nzonscreen import NZOnScreenIE
|
||||||
from .nzz import NZZIE
|
from .nzz import NZZIE
|
||||||
from .odatv import OdaTVIE
|
from .odatv import OdaTVIE
|
||||||
|
from .odkmedia import OnDemandChinaEpisodeIE
|
||||||
from .odnoklassniki import OdnoklassnikiIE
|
from .odnoklassniki import OdnoklassnikiIE
|
||||||
|
from .oftv import (
|
||||||
|
OfTVIE,
|
||||||
|
OfTVPlaylistIE
|
||||||
|
)
|
||||||
from .oktoberfesttv import OktoberfestTVIE
|
from .oktoberfesttv import OktoberfestTVIE
|
||||||
from .olympics import OlympicsReplayIE
|
from .olympics import OlympicsReplayIE
|
||||||
from .on24 import On24IE
|
from .on24 import On24IE
|
||||||
from .ondemandkorea import OnDemandKoreaIE
|
from .ondemandkorea import OnDemandKoreaIE
|
||||||
from .onefootball import OneFootballIE
|
from .onefootball import OneFootballIE
|
||||||
|
from .onenewsnz import OneNewsNZIE
|
||||||
|
from .oneplace import OnePlacePodcastIE
|
||||||
from .onet import (
|
from .onet import (
|
||||||
OnetIE,
|
OnetIE,
|
||||||
OnetChannelIE,
|
OnetChannelIE,
|
||||||
@@ -1300,6 +1419,7 @@
|
|||||||
PluralsightIE,
|
PluralsightIE,
|
||||||
PluralsightCourseIE,
|
PluralsightCourseIE,
|
||||||
)
|
)
|
||||||
|
from .podbayfm import PodbayFMIE, PodbayFMChannelIE
|
||||||
from .podchaser import PodchaserIE
|
from .podchaser import PodchaserIE
|
||||||
from .podomatic import PodomaticIE
|
from .podomatic import PodomaticIE
|
||||||
from .pokemon import (
|
from .pokemon import (
|
||||||
@@ -1313,6 +1433,8 @@
|
|||||||
from .polsatgo import PolsatGoIE
|
from .polsatgo import PolsatGoIE
|
||||||
from .polskieradio import (
|
from .polskieradio import (
|
||||||
PolskieRadioIE,
|
PolskieRadioIE,
|
||||||
|
PolskieRadioLegacyIE,
|
||||||
|
PolskieRadioAuditionIE,
|
||||||
PolskieRadioCategoryIE,
|
PolskieRadioCategoryIE,
|
||||||
PolskieRadioPlayerIE,
|
PolskieRadioPlayerIE,
|
||||||
PolskieRadioPodcastIE,
|
PolskieRadioPodcastIE,
|
||||||
@@ -1340,6 +1462,8 @@
|
|||||||
PuhuTVIE,
|
PuhuTVIE,
|
||||||
PuhuTVSerieIE,
|
PuhuTVSerieIE,
|
||||||
)
|
)
|
||||||
|
from .pr0gramm import Pr0grammStaticIE, Pr0grammIE
|
||||||
|
from .prankcast import PrankCastIE
|
||||||
from .premiershiprugby import PremiershipRugbyIE
|
from .premiershiprugby import PremiershipRugbyIE
|
||||||
from .presstv import PressTVIE
|
from .presstv import PressTVIE
|
||||||
from .projectveritas import ProjectVeritasIE
|
from .projectveritas import ProjectVeritasIE
|
||||||
@@ -1353,6 +1477,7 @@
|
|||||||
)
|
)
|
||||||
from .puls4 import Puls4IE
|
from .puls4 import Puls4IE
|
||||||
from .pyvideo import PyvideoIE
|
from .pyvideo import PyvideoIE
|
||||||
|
from .qingting import QingTingIE
|
||||||
from .qqmusic import (
|
from .qqmusic import (
|
||||||
QQMusicIE,
|
QQMusicIE,
|
||||||
QQMusicSingerIE,
|
QQMusicSingerIE,
|
||||||
@@ -1399,6 +1524,10 @@
|
|||||||
RayWenderlichCourseIE,
|
RayWenderlichCourseIE,
|
||||||
)
|
)
|
||||||
from .rbmaradio import RBMARadioIE
|
from .rbmaradio import RBMARadioIE
|
||||||
|
from .rbgtum import (
|
||||||
|
RbgTumIE,
|
||||||
|
RbgTumCourseIE,
|
||||||
|
)
|
||||||
from .rcs import (
|
from .rcs import (
|
||||||
RCSIE,
|
RCSIE,
|
||||||
RCSEmbedsIE,
|
RCSEmbedsIE,
|
||||||
@@ -1443,7 +1572,10 @@
|
|||||||
)
|
)
|
||||||
from .roosterteeth import RoosterTeethIE, RoosterTeethSeriesIE
|
from .roosterteeth import RoosterTeethIE, RoosterTeethSeriesIE
|
||||||
from .rottentomatoes import RottenTomatoesIE
|
from .rottentomatoes import RottenTomatoesIE
|
||||||
from .rozhlas import RozhlasIE
|
from .rozhlas import (
|
||||||
|
RozhlasIE,
|
||||||
|
RozhlasVltavaIE,
|
||||||
|
)
|
||||||
from .rte import RteIE, RteRadioIE
|
from .rte import RteIE, RteRadioIE
|
||||||
from .rtlnl import (
|
from .rtlnl import (
|
||||||
RtlNlIE,
|
RtlNlIE,
|
||||||
@@ -1480,6 +1612,7 @@
|
|||||||
from .rule34video import Rule34VideoIE
|
from .rule34video import Rule34VideoIE
|
||||||
from .rumble import (
|
from .rumble import (
|
||||||
RumbleEmbedIE,
|
RumbleEmbedIE,
|
||||||
|
RumbleIE,
|
||||||
RumbleChannelIE,
|
RumbleChannelIE,
|
||||||
)
|
)
|
||||||
from .rutube import (
|
from .rutube import (
|
||||||
@@ -1520,7 +1653,9 @@
|
|||||||
from .sapo import SapoIE
|
from .sapo import SapoIE
|
||||||
from .savefrom import SaveFromIE
|
from .savefrom import SaveFromIE
|
||||||
from .sbs import SBSIE
|
from .sbs import SBSIE
|
||||||
|
from .screen9 import Screen9IE
|
||||||
from .screencast import ScreencastIE
|
from .screencast import ScreencastIE
|
||||||
|
from .screencastify import ScreencastifyIE
|
||||||
from .screencastomatic import ScreencastOMaticIE
|
from .screencastomatic import ScreencastOMaticIE
|
||||||
from .scrippsnetworks import (
|
from .scrippsnetworks import (
|
||||||
ScrippsNetworksWatchIE,
|
ScrippsNetworksWatchIE,
|
||||||
@@ -1550,6 +1685,7 @@
|
|||||||
VivoIE,
|
VivoIE,
|
||||||
)
|
)
|
||||||
from .sharevideos import ShareVideosEmbedIE
|
from .sharevideos import ShareVideosEmbedIE
|
||||||
|
from .sibnet import SibnetEmbedIE
|
||||||
from .shemaroome import ShemarooMeIE
|
from .shemaroome import ShemarooMeIE
|
||||||
from .showroomlive import ShowRoomLiveIE
|
from .showroomlive import ShowRoomLiveIE
|
||||||
from .simplecast import (
|
from .simplecast import (
|
||||||
@@ -1565,7 +1701,6 @@
|
|||||||
SkyItVideoIE,
|
SkyItVideoIE,
|
||||||
SkyItVideoLiveIE,
|
SkyItVideoLiveIE,
|
||||||
SkyItIE,
|
SkyItIE,
|
||||||
SkyItAcademyIE,
|
|
||||||
SkyItArteIE,
|
SkyItArteIE,
|
||||||
CieloTVItIE,
|
CieloTVItIE,
|
||||||
TV8ItIE,
|
TV8ItIE,
|
||||||
@@ -1585,6 +1720,7 @@
|
|||||||
from .slideshare import SlideshareIE
|
from .slideshare import SlideshareIE
|
||||||
from .slideslive import SlidesLiveIE
|
from .slideslive import SlidesLiveIE
|
||||||
from .slutload import SlutloadIE
|
from .slutload import SlutloadIE
|
||||||
|
from .smotrim import SmotrimIE
|
||||||
from .snotr import SnotrIE
|
from .snotr import SnotrIE
|
||||||
from .sohu import SohuIE
|
from .sohu import SohuIE
|
||||||
from .sonyliv import (
|
from .sonyliv import (
|
||||||
@@ -1597,6 +1733,7 @@
|
|||||||
SoundcloudSetIE,
|
SoundcloudSetIE,
|
||||||
SoundcloudRelatedIE,
|
SoundcloudRelatedIE,
|
||||||
SoundcloudUserIE,
|
SoundcloudUserIE,
|
||||||
|
SoundcloudUserPermalinkIE,
|
||||||
SoundcloudTrackStationIE,
|
SoundcloudTrackStationIE,
|
||||||
SoundcloudPlaylistIE,
|
SoundcloudPlaylistIE,
|
||||||
SoundcloudSearchIE,
|
SoundcloudSearchIE,
|
||||||
@@ -1684,6 +1821,7 @@
|
|||||||
SVTPlayIE,
|
SVTPlayIE,
|
||||||
SVTSeriesIE,
|
SVTSeriesIE,
|
||||||
)
|
)
|
||||||
|
from .swearnet import SwearnetEpisodeIE
|
||||||
from .swrmediathek import SWRMediathekIE
|
from .swrmediathek import SWRMediathekIE
|
||||||
from .syvdk import SYVDKIE
|
from .syvdk import SYVDKIE
|
||||||
from .syfy import SyfyIE
|
from .syfy import SyfyIE
|
||||||
@@ -1727,7 +1865,15 @@
|
|||||||
)
|
)
|
||||||
from .teletask import TeleTaskIE
|
from .teletask import TeleTaskIE
|
||||||
from .telewebion import TelewebionIE
|
from .telewebion import TelewebionIE
|
||||||
from .tempo import TempoIE
|
from .tempo import TempoIE, IVXPlayerIE
|
||||||
|
from .tencent import (
|
||||||
|
IflixEpisodeIE,
|
||||||
|
IflixSeriesIE,
|
||||||
|
VQQSeriesIE,
|
||||||
|
VQQVideoIE,
|
||||||
|
WeTvEpisodeIE,
|
||||||
|
WeTvSeriesIE,
|
||||||
|
)
|
||||||
from .tennistv import TennisTVIE
|
from .tennistv import TennisTVIE
|
||||||
from .tenplay import TenPlayIE
|
from .tenplay import TenPlayIE
|
||||||
from .testurl import TestURLIE
|
from .testurl import TestURLIE
|
||||||
@@ -1749,6 +1895,11 @@
|
|||||||
from .thisamericanlife import ThisAmericanLifeIE
|
from .thisamericanlife import ThisAmericanLifeIE
|
||||||
from .thisav import ThisAVIE
|
from .thisav import ThisAVIE
|
||||||
from .thisoldhouse import ThisOldHouseIE
|
from .thisoldhouse import ThisOldHouseIE
|
||||||
|
from .thisvid import (
|
||||||
|
ThisVidIE,
|
||||||
|
ThisVidMemberIE,
|
||||||
|
ThisVidPlaylistIE,
|
||||||
|
)
|
||||||
from .threespeak import (
|
from .threespeak import (
|
||||||
ThreeSpeakIE,
|
ThreeSpeakIE,
|
||||||
ThreeSpeakUserIE,
|
ThreeSpeakUserIE,
|
||||||
@@ -1761,6 +1912,7 @@
|
|||||||
TikTokEffectIE,
|
TikTokEffectIE,
|
||||||
TikTokTagIE,
|
TikTokTagIE,
|
||||||
TikTokVMIE,
|
TikTokVMIE,
|
||||||
|
TikTokLiveIE,
|
||||||
DouyinIE,
|
DouyinIE,
|
||||||
)
|
)
|
||||||
from .tinypic import TinyPicIE
|
from .tinypic import TinyPicIE
|
||||||
@@ -1787,6 +1939,10 @@
|
|||||||
from .toutv import TouTvIE
|
from .toutv import TouTvIE
|
||||||
from .toypics import ToypicsUserIE, ToypicsIE
|
from .toypics import ToypicsUserIE, ToypicsIE
|
||||||
from .traileraddict import TrailerAddictIE
|
from .traileraddict import TrailerAddictIE
|
||||||
|
from .triller import (
|
||||||
|
TrillerIE,
|
||||||
|
TrillerUserIE,
|
||||||
|
)
|
||||||
from .trilulilu import TriluliluIE
|
from .trilulilu import TriluliluIE
|
||||||
from .trovo import (
|
from .trovo import (
|
||||||
TrovoIE,
|
TrovoIE,
|
||||||
@@ -1794,6 +1950,7 @@
|
|||||||
TrovoChannelVodIE,
|
TrovoChannelVodIE,
|
||||||
TrovoChannelClipIE,
|
TrovoChannelClipIE,
|
||||||
)
|
)
|
||||||
|
from .trtcocuk import TrtCocukVideoIE
|
||||||
from .trueid import TrueIDIE
|
from .trueid import TrueIDIE
|
||||||
from .trunews import TruNewsIE
|
from .trunews import TruNewsIE
|
||||||
from .truth import TruthIE
|
from .truth import TruthIE
|
||||||
@@ -1820,6 +1977,9 @@
|
|||||||
KatsomoIE,
|
KatsomoIE,
|
||||||
MTVUutisetArticleIE,
|
MTVUutisetArticleIE,
|
||||||
)
|
)
|
||||||
|
from .tv24ua import (
|
||||||
|
TV24UAVideoIE,
|
||||||
|
)
|
||||||
from .tv2dk import (
|
from .tv2dk import (
|
||||||
TV2DKIE,
|
TV2DKIE,
|
||||||
TV2DKBornholmPlayIE,
|
TV2DKBornholmPlayIE,
|
||||||
@@ -1869,7 +2029,8 @@
|
|||||||
TVPEmbedIE,
|
TVPEmbedIE,
|
||||||
TVPIE,
|
TVPIE,
|
||||||
TVPStreamIE,
|
TVPStreamIE,
|
||||||
TVPWebsiteIE,
|
TVPVODSeriesIE,
|
||||||
|
TVPVODVideoIE,
|
||||||
)
|
)
|
||||||
from .tvplay import (
|
from .tvplay import (
|
||||||
TVPlayIE,
|
TVPlayIE,
|
||||||
@@ -1900,8 +2061,13 @@
|
|||||||
TwitterIE,
|
TwitterIE,
|
||||||
TwitterAmplifyIE,
|
TwitterAmplifyIE,
|
||||||
TwitterBroadcastIE,
|
TwitterBroadcastIE,
|
||||||
|
TwitterSpacesIE,
|
||||||
TwitterShortenerIE,
|
TwitterShortenerIE,
|
||||||
)
|
)
|
||||||
|
from .txxx import (
|
||||||
|
TxxxIE,
|
||||||
|
PornTopIE,
|
||||||
|
)
|
||||||
from .udemy import (
|
from .udemy import (
|
||||||
UdemyIE,
|
UdemyIE,
|
||||||
UdemyCourseIE
|
UdemyCourseIE
|
||||||
@@ -1922,6 +2088,8 @@
|
|||||||
from .umg import UMGDeIE
|
from .umg import UMGDeIE
|
||||||
from .unistra import UnistraIE
|
from .unistra import UnistraIE
|
||||||
from .unity import UnityIE
|
from .unity import UnityIE
|
||||||
|
from .unscripted import UnscriptedNewsVideoIE
|
||||||
|
from .unsupported import KnownDRMIE, KnownPiracyIE
|
||||||
from .uol import UOLIE
|
from .uol import UOLIE
|
||||||
from .uplynk import (
|
from .uplynk import (
|
||||||
UplynkIE,
|
UplynkIE,
|
||||||
@@ -1941,7 +2109,10 @@
|
|||||||
from .vbox7 import Vbox7IE
|
from .vbox7 import Vbox7IE
|
||||||
from .veehd import VeeHDIE
|
from .veehd import VeeHDIE
|
||||||
from .veo import VeoIE
|
from .veo import VeoIE
|
||||||
from .veoh import VeohIE
|
from .veoh import (
|
||||||
|
VeohIE,
|
||||||
|
VeohUserIE
|
||||||
|
)
|
||||||
from .vesti import VestiIE
|
from .vesti import VestiIE
|
||||||
from .vevo import (
|
from .vevo import (
|
||||||
VevoIE,
|
VevoIE,
|
||||||
@@ -1967,6 +2138,13 @@
|
|||||||
)
|
)
|
||||||
from .videodetective import VideoDetectiveIE
|
from .videodetective import VideoDetectiveIE
|
||||||
from .videofyme import VideofyMeIE
|
from .videofyme import VideofyMeIE
|
||||||
|
from .videoken import (
|
||||||
|
VideoKenIE,
|
||||||
|
VideoKenPlayerIE,
|
||||||
|
VideoKenPlaylistIE,
|
||||||
|
VideoKenCategoryIE,
|
||||||
|
VideoKenTopicIE,
|
||||||
|
)
|
||||||
from .videomore import (
|
from .videomore import (
|
||||||
VideomoreIE,
|
VideomoreIE,
|
||||||
VideomoreVideoIE,
|
VideomoreVideoIE,
|
||||||
@@ -1979,7 +2157,6 @@
|
|||||||
VidioLiveIE
|
VidioLiveIE
|
||||||
)
|
)
|
||||||
from .vidlii import VidLiiIE
|
from .vidlii import VidLiiIE
|
||||||
from .vier import VierIE, VierVideosIE
|
|
||||||
from .viewlift import (
|
from .viewlift import (
|
||||||
ViewLiftIE,
|
ViewLiftIE,
|
||||||
ViewLiftEmbedIE,
|
ViewLiftEmbedIE,
|
||||||
@@ -1992,6 +2169,7 @@
|
|||||||
VimeoGroupsIE,
|
VimeoGroupsIE,
|
||||||
VimeoLikesIE,
|
VimeoLikesIE,
|
||||||
VimeoOndemandIE,
|
VimeoOndemandIE,
|
||||||
|
VimeoProIE,
|
||||||
VimeoReviewIE,
|
VimeoReviewIE,
|
||||||
VimeoUserIE,
|
VimeoUserIE,
|
||||||
VimeoWatchLaterIE,
|
VimeoWatchLaterIE,
|
||||||
@@ -2015,17 +2193,14 @@
|
|||||||
ViuIE,
|
ViuIE,
|
||||||
ViuPlaylistIE,
|
ViuPlaylistIE,
|
||||||
ViuOTTIE,
|
ViuOTTIE,
|
||||||
|
ViuOTTIndonesiaIE,
|
||||||
)
|
)
|
||||||
from .vk import (
|
from .vk import (
|
||||||
VKIE,
|
VKIE,
|
||||||
VKUserVideosIE,
|
VKUserVideosIE,
|
||||||
VKWallPostIE,
|
VKWallPostIE,
|
||||||
)
|
)
|
||||||
from .vlive import (
|
from .vocaroo import VocarooIE
|
||||||
VLiveIE,
|
|
||||||
VLivePostIE,
|
|
||||||
VLiveChannelIE,
|
|
||||||
)
|
|
||||||
from .vodlocker import VodlockerIE
|
from .vodlocker import VodlockerIE
|
||||||
from .vodpl import VODPlIE
|
from .vodpl import VODPlIE
|
||||||
from .vodplatform import VODPlatformIE
|
from .vodplatform import VODPlatformIE
|
||||||
@@ -2034,6 +2209,7 @@
|
|||||||
VoicyIE,
|
VoicyIE,
|
||||||
VoicyChannelIE,
|
VoicyChannelIE,
|
||||||
)
|
)
|
||||||
|
from .volejtv import VolejTVIE
|
||||||
from .voot import (
|
from .voot import (
|
||||||
VootIE,
|
VootIE,
|
||||||
VootSeriesIE,
|
VootSeriesIE,
|
||||||
@@ -2079,6 +2255,7 @@
|
|||||||
WDRElefantIE,
|
WDRElefantIE,
|
||||||
WDRMobileIE,
|
WDRMobileIE,
|
||||||
)
|
)
|
||||||
|
from .webcamerapl import WebcameraplIE
|
||||||
from .webcaster import (
|
from .webcaster import (
|
||||||
WebcasterIE,
|
WebcasterIE,
|
||||||
WebcasterFeedIE,
|
WebcasterFeedIE,
|
||||||
@@ -2092,7 +2269,6 @@
|
|||||||
WeiboMobileIE
|
WeiboMobileIE
|
||||||
)
|
)
|
||||||
from .weiqitv import WeiqiTVIE
|
from .weiqitv import WeiqiTVIE
|
||||||
from .wetv import WeTvEpisodeIE, WeTvSeriesIE
|
|
||||||
from .wikimedia import WikimediaIE
|
from .wikimedia import WikimediaIE
|
||||||
from .willow import WillowIE
|
from .willow import WillowIE
|
||||||
from .wimtv import WimTVIE
|
from .wimtv import WimTVIE
|
||||||
@@ -2100,17 +2276,27 @@
|
|||||||
from .wistia import (
|
from .wistia import (
|
||||||
WistiaIE,
|
WistiaIE,
|
||||||
WistiaPlaylistIE,
|
WistiaPlaylistIE,
|
||||||
|
WistiaChannelIE,
|
||||||
|
)
|
||||||
|
from .wordpress import (
|
||||||
|
WordpressPlaylistEmbedIE,
|
||||||
|
WordpressMiniAudioPlayerEmbedIE,
|
||||||
)
|
)
|
||||||
from .worldstarhiphop import WorldStarHipHopIE
|
from .worldstarhiphop import WorldStarHipHopIE
|
||||||
from .wppilot import (
|
from .wppilot import (
|
||||||
WPPilotIE,
|
WPPilotIE,
|
||||||
WPPilotChannelsIE,
|
WPPilotChannelsIE,
|
||||||
)
|
)
|
||||||
|
from .wrestleuniverse import (
|
||||||
|
WrestleUniverseVODIE,
|
||||||
|
WrestleUniversePPVIE,
|
||||||
|
)
|
||||||
from .wsj import (
|
from .wsj import (
|
||||||
WSJIE,
|
WSJIE,
|
||||||
WSJArticleIE,
|
WSJArticleIE,
|
||||||
)
|
)
|
||||||
from .wwe import WWEIE
|
from .wwe import WWEIE
|
||||||
|
from .xanimu import XanimuIE
|
||||||
from .xbef import XBefIE
|
from .xbef import XBefIE
|
||||||
from .xboxclips import XboxClipsIE
|
from .xboxclips import XboxClipsIE
|
||||||
from .xfileshare import XFileShareIE
|
from .xfileshare import XFileShareIE
|
||||||
@@ -2119,12 +2305,6 @@
|
|||||||
XHamsterEmbedIE,
|
XHamsterEmbedIE,
|
||||||
XHamsterUserIE,
|
XHamsterUserIE,
|
||||||
)
|
)
|
||||||
from .xiami import (
|
|
||||||
XiamiSongIE,
|
|
||||||
XiamiAlbumIE,
|
|
||||||
XiamiArtistIE,
|
|
||||||
XiamiCollectionIE
|
|
||||||
)
|
|
||||||
from .ximalaya import (
|
from .ximalaya import (
|
||||||
XimalayaIE,
|
XimalayaIE,
|
||||||
XimalayaAlbumIE
|
XimalayaAlbumIE
|
||||||
@@ -2159,8 +2339,10 @@
|
|||||||
ZenYandexChannelIE,
|
ZenYandexChannelIE,
|
||||||
)
|
)
|
||||||
from .yapfiles import YapFilesIE
|
from .yapfiles import YapFilesIE
|
||||||
|
from .yappy import YappyIE
|
||||||
from .yesjapan import YesJapanIE
|
from .yesjapan import YesJapanIE
|
||||||
from .yinyuetai import YinYueTaiIE
|
from .yinyuetai import YinYueTaiIE
|
||||||
|
from .yle_areena import YleAreenaIE
|
||||||
from .ynet import YnetIE
|
from .ynet import YnetIE
|
||||||
from .youjizz import YouJizzIE
|
from .youjizz import YouJizzIE
|
||||||
from .youku import (
|
from .youku import (
|
||||||
@@ -2175,42 +2357,44 @@
|
|||||||
from .youporn import YouPornIE
|
from .youporn import YouPornIE
|
||||||
from .yourporn import YourPornIE
|
from .yourporn import YourPornIE
|
||||||
from .yourupload import YourUploadIE
|
from .yourupload import YourUploadIE
|
||||||
from .youtube import (
|
|
||||||
YoutubeIE,
|
|
||||||
YoutubeClipIE,
|
|
||||||
YoutubeFavouritesIE,
|
|
||||||
YoutubeNotificationsIE,
|
|
||||||
YoutubeHistoryIE,
|
|
||||||
YoutubeTabIE,
|
|
||||||
YoutubeLivestreamEmbedIE,
|
|
||||||
YoutubePlaylistIE,
|
|
||||||
YoutubeRecommendedIE,
|
|
||||||
YoutubeSearchDateIE,
|
|
||||||
YoutubeSearchIE,
|
|
||||||
YoutubeSearchURLIE,
|
|
||||||
YoutubeMusicSearchURLIE,
|
|
||||||
YoutubeSubscriptionsIE,
|
|
||||||
YoutubeStoriesIE,
|
|
||||||
YoutubeTruncatedIDIE,
|
|
||||||
YoutubeTruncatedURLIE,
|
|
||||||
YoutubeYtBeIE,
|
|
||||||
YoutubeYtUserIE,
|
|
||||||
YoutubeWatchLaterIE,
|
|
||||||
)
|
|
||||||
from .zapiks import ZapiksIE
|
from .zapiks import ZapiksIE
|
||||||
from .zattoo import (
|
from .zattoo import (
|
||||||
BBVTVIE,
|
BBVTVIE,
|
||||||
|
BBVTVLiveIE,
|
||||||
|
BBVTVRecordingsIE,
|
||||||
EinsUndEinsTVIE,
|
EinsUndEinsTVIE,
|
||||||
|
EinsUndEinsTVLiveIE,
|
||||||
|
EinsUndEinsTVRecordingsIE,
|
||||||
EWETVIE,
|
EWETVIE,
|
||||||
|
EWETVLiveIE,
|
||||||
|
EWETVRecordingsIE,
|
||||||
GlattvisionTVIE,
|
GlattvisionTVIE,
|
||||||
|
GlattvisionTVLiveIE,
|
||||||
|
GlattvisionTVRecordingsIE,
|
||||||
MNetTVIE,
|
MNetTVIE,
|
||||||
NetPlusIE,
|
MNetTVLiveIE,
|
||||||
|
MNetTVRecordingsIE,
|
||||||
|
NetPlusTVIE,
|
||||||
|
NetPlusTVLiveIE,
|
||||||
|
NetPlusTVRecordingsIE,
|
||||||
OsnatelTVIE,
|
OsnatelTVIE,
|
||||||
|
OsnatelTVLiveIE,
|
||||||
|
OsnatelTVRecordingsIE,
|
||||||
QuantumTVIE,
|
QuantumTVIE,
|
||||||
|
QuantumTVLiveIE,
|
||||||
|
QuantumTVRecordingsIE,
|
||||||
SaltTVIE,
|
SaltTVIE,
|
||||||
|
SaltTVLiveIE,
|
||||||
|
SaltTVRecordingsIE,
|
||||||
SAKTVIE,
|
SAKTVIE,
|
||||||
|
SAKTVLiveIE,
|
||||||
|
SAKTVRecordingsIE,
|
||||||
VTXTVIE,
|
VTXTVIE,
|
||||||
|
VTXTVLiveIE,
|
||||||
|
VTXTVRecordingsIE,
|
||||||
WalyTVIE,
|
WalyTVIE,
|
||||||
|
WalyTVLiveIE,
|
||||||
|
WalyTVRecordingsIE,
|
||||||
ZattooIE,
|
ZattooIE,
|
||||||
ZattooLiveIE,
|
ZattooLiveIE,
|
||||||
ZattooMoviesIE,
|
ZattooMoviesIE,
|
||||||
@@ -2221,6 +2405,7 @@
|
|||||||
Zee5IE,
|
Zee5IE,
|
||||||
Zee5SeriesIE,
|
Zee5SeriesIE,
|
||||||
)
|
)
|
||||||
|
from .zeenews import ZeeNewsIE
|
||||||
from .zhihu import ZhihuIE
|
from .zhihu import ZhihuIE
|
||||||
from .zingmp3 import (
|
from .zingmp3 import (
|
||||||
ZingMp3IE,
|
ZingMp3IE,
|
||||||
|
|||||||
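All of the hunks above touch the extractor registry (yt-dlp keeps it in yt_dlp/extractor/_extractors.py): every supported site is one import line, so adding or dropping a site shows up here as a one-line change plus a new module. A minimal, hypothetical sketch of what one such line points at (ExampleSiteIE and its URLs are made up for illustration, not a real extractor):

# yt_dlp/extractor/examplesite.py (hypothetical module)
from yt_dlp.extractor.common import InfoExtractor


class ExampleSiteIE(InfoExtractor):
    _VALID_URL = r'https?://examplesite\.test/watch/(?P<id>\d+)'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # a real extractor would download the page and build a formats list
        return self.url_result(f'https://examplesite.test/raw/{video_id}.mp4')

# ...and the matching one-line registration in _extractors.py:
# from .examplesite import ExampleSiteIE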
@@ -155,8 +155,6 @@ def _real_extract(self, url):
                 'format_id': format_id
             })
-
-        self._sort_formats(formats)
 
         return {
             'id': video_id,
             'title': self._og_search_title(webpage),
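The self._sort_formats(formats) deletion above recurs throughout this diff; it tracks yt-dlp deprecating per-extractor sorting, with the core now ordering the returned formats itself (honouring --format-sort). A minimal sketch with a hypothetical extractor, assuming that behavior:

from yt_dlp.extractor.common import InfoExtractor


class ExampleFormatsIE(InfoExtractor):  # hypothetical, for illustration only
    _VALID_URL = r'https?://example\.test/v/(?P<id>\d+)'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        formats = [  # order no longer matters to the extractor
            {'url': f'https://example.test/{video_id}/360.mp4', 'height': 360},
            {'url': f'https://example.test/{video_id}/720.mp4', 'height': 720},
        ]
        # previously required here: self._sort_formats(formats)
        return {'id': video_id, 'title': f'Video {video_id}', 'formats': formats}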
@@ -221,7 +219,6 @@ def tokenize_url(url, token):
                 entry_protocol='m3u8_native', m3u8_id='hls', fatal=False)
             if formats:
                 break
-        self._sort_formats(formats)
 
         subtitles = {}
         src_vtt = stream.get('captions', {}).get('src-vtt')

@@ -78,7 +78,6 @@ def _real_extract(self, url):
                 'url': mp4_url,
                 'width': 640,
             })
-        self._sort_formats(formats)
 
         image = video.get('image') or {}
 
@@ -119,7 +118,6 @@ def _real_extract(self, url):
         title = video_data['title']
         formats = self._extract_m3u8_formats(
             video_data['videoURL'].split('?')[0], video_id, 'mp4')
-        self._sort_formats(formats)
 
         return {
             'id': video_id,
@@ -156,7 +156,7 @@ class AbemaTVBaseIE(InfoExtractor):
     def _generate_aks(cls, deviceid):
         deviceid = deviceid.encode('utf-8')
         # add 1 hour and then drop minute and secs
-        ts_1hour = int((time_seconds(hours=9) // 3600 + 1) * 3600)
+        ts_1hour = int((time_seconds() // 3600 + 1) * 3600)
         time_struct = time.gmtime(ts_1hour)
         ts_1hour_str = str(ts_1hour).encode('utf-8')
 
@@ -190,6 +190,16 @@ def _get_device_token(self):
         if self._USERTOKEN:
             return self._USERTOKEN
 
+        username, _ = self._get_login_info()
+        AbemaTVBaseIE._USERTOKEN = username and self.cache.load(self._NETRC_MACHINE, username)
+        if AbemaTVBaseIE._USERTOKEN:
+            # try authentication with locally stored token
+            try:
+                self._get_media_token(True)
+                return
+            except ExtractorError as e:
+                self.report_warning(f'Failed to login with cached user token; obtaining a fresh one ({e})')
+
         AbemaTVBaseIE._DEVICE_ID = str(uuid.uuid4())
         aks = self._generate_aks(self._DEVICE_ID)
         user_data = self._download_json(
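The block added to _get_device_token above makes AbemaTV try a user token cached under the netrc machine name and only register a fresh device when validation fails; _perform_login (next hunk) stores the token back with self.cache.store. A self-contained sketch of that round trip, where Cache, validate and request_fresh are stand-ins rather than yt-dlp API:

class Cache:  # stand-in for InfoExtractor.cache
    def __init__(self):
        self._data = {}

    def load(self, section, key):
        return self._data.get((section, key))

    def store(self, section, key, value):
        self._data[(section, key)] = value


def get_user_token(cache, username, validate, request_fresh):
    token = username and cache.load('abematv', username)
    if token and validate(token):  # plays the role of _get_media_token(True)
        return token  # cached token still valid: skip device registration
    token = request_fresh()  # otherwise register a new device
    cache.store('abematv', username, token)
    return token


print(get_user_token(Cache(), 'user@example.com',
                     lambda t: False, lambda: 'fresh-token'))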
@@ -300,6 +310,11 @@ class AbemaTVIE(AbemaTVBaseIE):
     _TIMETABLE = None
 
     def _perform_login(self, username, password):
+        self._get_device_token()
+        if self.cache.load(self._NETRC_MACHINE, username) and self._get_media_token():
+            self.write_debug('Skipping logging in')
+            return
+
         if '@' in username:  # don't strictly check if it's email address or not
             ep, method = 'user/email', 'email'
         else:
@@ -319,6 +334,7 @@ def _perform_login(self, username, password):
 
         AbemaTVBaseIE._USERTOKEN = login_response['token']
         self._get_media_token(True)
+        self.cache.store(self._NETRC_MACHINE, username, AbemaTVBaseIE._USERTOKEN)
 
     def _real_extract(self, url):
         # starting download using infojson from this extractor is undefined behavior,
@@ -416,7 +432,7 @@ def _real_extract(self, url):
             f'https://api.abema.io/v1/video/programs/{video_id}', video_id,
             note='Checking playability',
             headers=headers)
-        ondemand_types = traverse_obj(api_response, ('terms', ..., 'onDemandType'), default=[])
+        ondemand_types = traverse_obj(api_response, ('terms', ..., 'onDemandType'))
         if 3 not in ondemand_types:
             # cannot acquire decryption key for these streams
             self.report_warning('This is a premium-only stream')
@@ -489,7 +505,7 @@ def _fetch_page(self, playlist_id, series_version, page):
             })
         yield from (
             self.url_result(f'https://abema.tv/video/episode/{x}')
-            for x in traverse_obj(programs, ('programs', ..., 'id'), default=[]))
+            for x in traverse_obj(programs, ('programs', ..., 'id')))
 
     def _entries(self, playlist_id, series_version):
         return OnDemandPagedList(
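Dropping default=[] in the two hunks above is safe because, with a branching path (...), traverse_obj collects every match into a list and already falls back to an empty list when nothing matches. A hedged sketch, assuming yt-dlp's traverse_obj behaves as described:

from yt_dlp.utils import traverse_obj

api_response = {'terms': [{'onDemandType': 3}, {'onDemandType': 5}]}
print(traverse_obj(api_response, ('terms', ..., 'onDemandType')))  # [3, 5]
print(traverse_obj({}, ('terms', ..., 'onDemandType')))  # [] even without default=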
@@ -27,7 +27,6 @@ def _extract_metadata(self, video_id, video_info):
                 **parse_codecs(video.get('codecs', ''))
             })
 
-        self._sort_formats(formats)
         return {
             'id': video_id,
             'formats': formats,
@@ -84,7 +83,7 @@ def _real_extract(self, url):
         video_id = self._match_id(url)
 
         webpage = self._download_webpage(url, video_id)
-        json_all = self._search_json(r'window.videoInfo\s*=\s*', webpage, 'videoInfo', video_id)
+        json_all = self._search_json(r'window.videoInfo\s*=', webpage, 'videoInfo', video_id)
 
         title = json_all.get('title')
         video_list = json_all.get('videoList') or []
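The trailing \s* removed from these patterns appears redundant because _search_json itself skips whitespace between the start pattern and the JSON object it parses. A rough model of the match in plain re (a sketch of the idea, not the real implementation):

import re

webpage = 'window.videoInfo =  {"title": "demo"}'
start_pattern = r'window.videoInfo\s*='  # the new, shorter form
match = re.search(start_pattern + r'\s*(?P<json>{.+})', webpage)
print(match.group('json'))  # {"title": "demo"} -- the same pages still match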
@@ -161,10 +160,10 @@ class AcFunBangumiIE(AcFunVideoBaseIE):
     def _real_extract(self, url):
         video_id = self._match_id(url)
         ac_idx = parse_qs(url).get('ac', [None])[-1]
-        video_id = f'{video_id}{format_field(ac_idx, template="__%s")}'
+        video_id = f'{video_id}{format_field(ac_idx, None, "__%s")}'
 
         webpage = self._download_webpage(url, video_id)
-        json_bangumi_data = self._search_json(r'window.bangumiData\s*=\s*', webpage, 'bangumiData', video_id)
+        json_bangumi_data = self._search_json(r'window.bangumiData\s*=', webpage, 'bangumiData', video_id)
 
         if ac_idx:
             video_info = json_bangumi_data['hlVideoInfo']
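The rewritten format_field call passes the template positionally; with field=None the value itself is formatted when truthy, and an empty string is returned otherwise, so the __<ac> suffix only appears when the URL actually carried an ac query parameter. Assuming yt-dlp's helper behaves as used in this diff:

from yt_dlp.utils import format_field

print('ab12' + format_field('3', None, '__%s'))   # ab12__3
print('ab12' + format_field(None, None, '__%s'))  # ab12 (no suffix)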
@@ -181,7 +180,7 @@ def _real_extract(self, url):
             if v.get('id') == season_id), 1)
 
         json_bangumi_list = self._search_json(
-            r'window\.bangumiList\s*=\s*', webpage, 'bangumiList', video_id, fatal=False)
+            r'window\.bangumiList\s*=', webpage, 'bangumiList', video_id, fatal=False)
         video_internal_id = int_or_none(traverse_obj(json_bangumi_data, ('currentVideoInfo', 'id')))
         episode_number = video_internal_id and next((
             idx for idx, v in enumerate(json_bangumi_list.get('items') or [], 1)
@@ -28,30 +28,34 @@
 
 
 class ADNIE(InfoExtractor):
-    IE_DESC = 'Anime Digital Network'
-    _VALID_URL = r'https?://(?:www\.)?animedigitalnetwork\.fr/video/[^/]+/(?P<id>\d+)'
-    _TEST = {
-        'url': 'http://animedigitalnetwork.fr/video/blue-exorcist-kyoto-saga/7778-episode-1-debut-des-hostilites',
-        'md5': '0319c99885ff5547565cacb4f3f9348d',
+    IE_DESC = 'Animation Digital Network'
+    _VALID_URL = r'https?://(?:www\.)?(?:animation|anime)digitalnetwork\.fr/video/[^/]+/(?P<id>\d+)'
+    _TESTS = [{
+        'url': 'https://animationdigitalnetwork.fr/video/fruits-basket/9841-episode-1-a-ce-soir',
+        'md5': '1c9ef066ceb302c86f80c2b371615261',
         'info_dict': {
-            'id': '7778',
+            'id': '9841',
             'ext': 'mp4',
-            'title': 'Blue Exorcist - Kyôto Saga - Episode 1',
-            'description': 'md5:2f7b5aa76edbc1a7a92cedcda8a528d5',
-            'series': 'Blue Exorcist - Kyôto Saga',
-            'duration': 1467,
-            'release_date': '20170106',
+            'title': 'Fruits Basket - Episode 1',
+            'description': 'md5:14be2f72c3c96809b0ca424b0097d336',
+            'series': 'Fruits Basket',
+            'duration': 1437,
+            'release_date': '20190405',
             'comment_count': int,
             'average_rating': float,
-            'season_number': 2,
-            'episode': 'Début des hostilités',
+            'season_number': 1,
+            'episode': 'À ce soir !',
             'episode_number': 1,
-        }
-    }
+        },
+        'skip': 'Only available in region (FR, ...)',
+    }, {
+        'url': 'http://animedigitalnetwork.fr/video/blue-exorcist-kyoto-saga/7778-episode-1-debut-des-hostilites',
+        'only_matching': True,
+    }]
 
-    _NETRC_MACHINE = 'animedigitalnetwork'
-    _BASE_URL = 'http://animedigitalnetwork.fr'
-    _API_BASE_URL = 'https://gw.api.animedigitalnetwork.fr/'
+    _NETRC_MACHINE = 'animationdigitalnetwork'
+    _BASE = 'animationdigitalnetwork.fr'
+    _API_BASE_URL = 'https://gw.api.' + _BASE + '/'
     _PLAYER_BASE_URL = _API_BASE_URL + 'player/'
     _HEADERS = {}
     _LOGIN_ERR_MESSAGE = 'Unable to log in'
@@ -75,11 +79,11 @@ def _get_subtitles(self, sub_url, video_id):
         if subtitle_location:
             enc_subtitles = self._download_webpage(
                 subtitle_location, video_id, 'Downloading subtitles data',
-                fatal=False, headers={'Origin': 'https://animedigitalnetwork.fr'})
+                fatal=False, headers={'Origin': 'https://' + self._BASE})
         if not enc_subtitles:
             return None
 
-        # http://animedigitalnetwork.fr/components/com_vodvideo/videojs/adn-vjs.min.js
+        # http://animationdigitalnetwork.fr/components/com_vodvideo/videojs/adn-vjs.min.js
         dec_subtitles = unpad_pkcs7(aes_cbc_decrypt_bytes(
             compat_b64decode(enc_subtitles[24:]),
             binascii.unhexlify(self._K + '7fac1178830cfe0c'),
@@ -164,7 +168,7 @@ def _real_extract(self, url):
         }, data=b'')['token']
 
         links_url = try_get(options, lambda x: x['video']['url']) or (video_base_url + 'link')
-        self._K = ''.join([random.choice('0123456789abcdef') for _ in range(16)])
+        self._K = ''.join(random.choices('0123456789abcdef', k=16))
         message = bytes_to_intlist(json.dumps({
             'k': self._K,
             't': token,
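random.choices(population, k=16) draws all 16 characters in one call instead of a 16-iteration comprehension; both sample uniformly with replacement (the concrete draws differ because the two functions consume the RNG differently, which is harmless for a random key). A quick check:

import random

old_style = ''.join([random.choice('0123456789abcdef') for _ in range(16)])
new_style = ''.join(random.choices('0123456789abcdef', k=16))
assert len(old_style) == len(new_style) == 16
assert set(new_style) <= set('0123456789abcdef')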
@@ -231,7 +235,6 @@ def _real_extract(self, url):
             for f in m3u8_formats:
                 f['language'] = 'fr'
             formats.extend(m3u8_formats)
-        self._sort_formats(formats)
 
         video = (self._download_json(
             self._API_BASE_URL + 'video/%s' % video_id, video_id,
@@ -1344,10 +1344,15 @@
         'username_field': 'username',
         'password_field': 'password',
     },
+    'AlticeOne': {
+        'name': 'Optimum TV',
+        'username_field': 'j_username',
+        'password_field': 'j_password',
+    },
 }
 
 
-class AdobePassIE(InfoExtractor):
+class AdobePassIE(InfoExtractor):  # XXX: Conventionally, base classes should end with BaseIE/InfoExtractor
     _SERVICE_PROVIDER_TEMPLATE = 'https://sp.auth.adobe.com/adobe-services/%s'
     _USER_AGENT = 'Mozilla/5.0 (X11; Linux i686; rv:47.0) Gecko/20100101 Firefox/47.0'
     _MVPD_CACHE = 'ap-mvpd'
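The new AlticeOne entry is consumed by the login flow changed in the next hunk: the configured *_field names become the keys of the POSTed form, and AlticeOne shares Cablevision's extra _eventId_proceed field. A simplified, self-contained sketch of that lookup (build_login_form is illustrative, not the real function name):

MSO_INFO = {
    'AlticeOne': {
        'name': 'Optimum TV',
        'username_field': 'j_username',
        'password_field': 'j_password',
    },
}


def build_login_form(mso_id, username, password):
    mso_info = MSO_INFO[mso_id]
    form_data = {
        mso_info.get('username_field', 'username'): username,
        mso_info.get('password_field', 'password'): password,
    }
    if mso_id in ('Cablevision', 'AlticeOne'):  # these IdPs need the extra field
        form_data['_eventId_proceed'] = ''
    return form_data


print(build_login_form('AlticeOne', 'user@example.com', 'hunter2'))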
@@ -1705,7 +1710,7 @@ def extract_redirect_url(html, url=None, fatal=False):
                     mso_info.get('username_field', 'username'): username,
                     mso_info.get('password_field', 'password'): password
                 }
-                if mso_id == 'Cablevision':
+                if mso_id in ('Cablevision', 'AlticeOne'):
                     form_data['_eventId_proceed'] = ''
                 mvpd_confirm_page_res = post_form(provider_login_page_res, 'Logging in', form_data)
                 if mso_id != 'Rogers':

@@ -70,7 +70,6 @@ def _parse_video_data(self, video_data):
                 })
                 s3_extracted = True
             formats.append(f)
-        self._sort_formats(formats)
 
         return {
             'id': video_id,
@@ -269,7 +268,6 @@ def _real_extract(self, url):
                 'width': int_or_none(source.get('width') or None),
                 'url': source_src,
             })
-        self._sort_formats(formats)
 
         # For both metadata and downloaded files the duration varies among
         # formats. I just pick the max one

@@ -180,7 +180,6 @@ def _real_extract(self, url):
             info['subtitles'].setdefault('en', []).append({
                 'url': asset_url,
             })
-        self._sort_formats(info['formats'])
 
         return info
     else:
Some files were not shown because too many files have changed in this diff.