Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2025-12-08 15:12:47 +01:00)

Compare commits
2022.06.29 ... 2023.10.07 (1289 commits)
(Commit table: 1289 rows of abbreviated SHA1 hashes only; the Author and Date columns were empty in the mirror view.)
.github/ISSUE_TEMPLATE/1_broken_site.yml (vendored, 69 lines changed)
@@ -1,7 +1,14 @@
-name: Broken site
-description: Report broken or misfunctioning site
+name: Broken site support
+description: Report issue with yt-dlp on a supported site
 labels: [triage, site-bug]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
   - type: checkboxes
     id: checklist
     attributes:
@@ -9,15 +16,15 @@ body:
       description: |
        Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
      options:
-        - label: I'm reporting a broken site
+        - label: I'm reporting that yt-dlp is broken on a **supported** site
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.06.29** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.10.07** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -26,37 +33,47 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your issue in an arbitrary form.
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+          required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+          required: true
   - type: textarea
     id: log
     attributes:
-      label: Verbose log
+      label: Complete Verbose Output
       description: |
-        Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
-        It should start like this:
+        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
+
+        It should look similar to this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2022.06.29 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (2022.06.29)
+        [debug] Command-line config: ['-vU', 'test:youtube']
+        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version 2023.10.07 [9d339c4] (win32_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] Checking exe version: ffmpeg -bsfs
+        [debug] Checking exe version: ffprobe -bsfs
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] Proxy map: {}
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: 2023.10.07, Current version: 2023.10.07
+        yt-dlp is up to date (2023.10.07)
         <more lines>
       render: shell
     validations:
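The updated template above directs API users to add `'verbose': True` to the `YoutubeDL` params rather than pass `-vU`. A minimal, illustrative sketch of that (the URL is the old template's placeholder, not a working test target):

```python
# Minimal sketch: enabling verbose output through yt-dlp's Python API, as the
# updated template asks of API users. The URL is the old template's placeholder.
import yt_dlp

ydl_opts = {'verbose': True}  # API counterpart of the -v part of `yt-dlp -vU`
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['http://www.youtube.com/watch?v=BaW_jenozKc'])
```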
@@ -2,6 +2,13 @@ name: Site support request
 description: Request support for a new site
 labels: [triage, site-request]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
   - type: checkboxes
     id: checklist
     attributes:
@@ -11,13 +18,13 @@ body:
       options:
         - label: I'm reporting a new site support request
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.06.29** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.10.07** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -26,8 +33,8 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: example-urls
     attributes:
@@ -43,31 +50,42 @@ body:
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide any additional information
-      placeholder: WRITE DESCRIPTION HERE
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+          required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+          required: true
   - type: textarea
     id: log
     attributes:
-      label: Verbose log
+      label: Complete Verbose Output
       description: |
-        Provide the complete verbose output **using one of the example URLs provided above**.
-        It should start like this:
+        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
+
+        It should look similar to this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2022.06.29 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (2022.06.29)
+        [debug] Command-line config: ['-vU', 'test:youtube']
+        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version 2023.10.07 [9d339c4] (win32_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] Checking exe version: ffmpeg -bsfs
+        [debug] Checking exe version: ffprobe -bsfs
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] Proxy map: {}
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: 2023.10.07, Current version: 2023.10.07
+        yt-dlp is up to date (2023.10.07)
         <more lines>
       render: shell
     validations:
@@ -2,6 +2,13 @@ name: Site feature request
 description: Request a new functionality for a supported site
 labels: [triage, site-enhancement]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
   - type: checkboxes
     id: checklist
     attributes:
@@ -11,11 +18,11 @@ body:
      options:
         - label: I'm requesting a site-specific feature
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.06.29** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.10.07** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -24,8 +31,8 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: example-urls
     attributes:
@@ -39,33 +46,42 @@ body:
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your site feature request in an arbitrary form.
-        Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+          required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+          required: true
   - type: textarea
     id: log
     attributes:
-      label: Verbose log
+      label: Complete Verbose Output
       description: |
-        Provide the complete verbose output of yt-dlp that demonstrates the need for the enhancement.
-        It should start like this:
+        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
+
+        It should look similar to this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2022.06.29 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (2022.06.29)
+        [debug] Command-line config: ['-vU', 'test:youtube']
+        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version 2023.10.07 [9d339c4] (win32_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] Checking exe version: ffmpeg -bsfs
+        [debug] Checking exe version: ffprobe -bsfs
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] Proxy map: {}
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: 2023.10.07, Current version: 2023.10.07
+        yt-dlp is up to date (2023.10.07)
         <more lines>
       render: shell
     validations:
.github/ISSUE_TEMPLATE/4_bug_report.yml (vendored, 62 lines changed)
@@ -1,7 +1,14 @@
-name: Bug report
+name: Core bug report
 description: Report a bug unrelated to any particular site or extractor
 labels: [triage, bug]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
   - type: checkboxes
     id: checklist
     attributes:
@@ -11,46 +18,55 @@ body:
       options:
         - label: I'm reporting a bug unrelated to a specific site
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.06.29** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.10.07** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your issue in an arbitrary form.
-        Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+          required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+          required: true
   - type: textarea
     id: log
     attributes:
-      label: Verbose log
+      label: Complete Verbose Output
       description: |
-        Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
-        It should start like this:
+        Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
+
+        It should look similar to this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2022.06.29 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (2022.06.29)
+        [debug] Command-line config: ['-vU', 'test:youtube']
+        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version 2023.10.07 [9d339c4] (win32_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] Checking exe version: ffmpeg -bsfs
+        [debug] Checking exe version: ffprobe -bsfs
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] Proxy map: {}
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: 2023.10.07, Current version: 2023.10.07
+        yt-dlp is up to date (2023.10.07)
         <more lines>
       render: shell
     validations:
.github/ISSUE_TEMPLATE/5_feature_request.yml (vendored, 56 lines changed)
@@ -2,6 +2,13 @@ name: Feature request
|
|||||||
description: Request a new functionality unrelated to any particular site or extractor
|
description: Request a new functionality unrelated to any particular site or extractor
|
||||||
labels: [triage, enhancement]
|
labels: [triage, enhancement]
|
||||||
body:
|
body:
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||||
|
description: Fill all fields even if you think it is irrelevant for the issue
|
||||||
|
options:
|
||||||
|
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
|
||||||
|
required: true
|
||||||
- type: checkboxes
|
- type: checkboxes
|
||||||
id: checklist
|
id: checklist
|
||||||
attributes:
|
attributes:
|
||||||
@@ -13,41 +20,48 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
|
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
|
||||||
required: true
|
required: true
|
||||||
- label: I've verified that I'm running yt-dlp version **2022.06.29** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
|
- label: I've verified that I'm running yt-dlp version **2023.10.07** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
|
||||||
required: true
|
required: true
|
||||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
|
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
required: true
|
||||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: description
|
id: description
|
||||||
attributes:
|
attributes:
|
||||||
label: Description
|
label: Provide a description that is worded well enough to be understood
|
||||||
description: |
|
description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
|
||||||
Provide an explanation of your site feature request in an arbitrary form.
|
placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
|
||||||
Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
|
|
||||||
Provide any additional information, any suggested solutions, and as much context and examples as possible
|
|
||||||
placeholder: WRITE DESCRIPTION HERE
|
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
- type: checkboxes
|
||||||
|
id: verbose
|
||||||
|
attributes:
|
||||||
|
label: Provide verbose output that clearly demonstrates the problem
|
||||||
|
options:
|
||||||
|
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||||
|
- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
|
||||||
|
required: false
|
||||||
|
- label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: log
|
id: log
|
||||||
attributes:
|
attributes:
|
||||||
label: Verbose log
|
label: Complete Verbose Output
|
||||||
description: |
|
description: |
|
||||||
If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
|
It should start like this:
|
||||||
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
|
|
||||||
It should look similar to this:
|
|
||||||
placeholder: |
|
placeholder: |
|
||||||
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
|
[debug] Command-line config: ['-vU', 'test:youtube']
|
||||||
[debug] Portable config file: yt-dlp.conf
|
[debug] Portable config "yt-dlp.conf": ['-i']
|
||||||
[debug] Portable config: ['-i']
|
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||||
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
|
[debug] yt-dlp version 2023.10.07 [9d339c4] (win32_exe)
|
||||||
[debug] yt-dlp version 2021.12.01 (exe)
|
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||||
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
|
[debug] Checking exe version: ffmpeg -bsfs
|
||||||
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
|
[debug] Checking exe version: ffprobe -bsfs
|
||||||
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
|
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||||
|
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||||
[debug] Proxy map: {}
|
[debug] Proxy map: {}
|
||||||
yt-dlp is up to date (2021.12.01)
|
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||||
|
Latest version: 2023.10.07, Current version: 2023.10.07
|
||||||
|
yt-dlp is up to date (2023.10.07)
|
||||||
<more lines>
|
<more lines>
|
||||||
render: shell
|
render: shell
|
||||||
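The `id: verbose` checkbox block added above tells API users to pass `'verbose': True` in the `YoutubeDL` params instead of the `-vU` flag. As a rough illustration (not part of the templates themselves), the embedding-API equivalent looks like the following sketch; the URL is merely the sample one used elsewhere in these templates:

import yt_dlp

# Minimal sketch of the API usage the checkbox refers to: passing
# 'verbose': True in the YoutubeDL params produces the same [debug]
# output that the issue forms ask reporters to attach. The URL is only
# an example; the -U self-update flag is a CLI concern and has no
# direct params equivalent here.
params = {'verbose': True}
with yt_dlp.YoutubeDL(params) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])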
.github/ISSUE_TEMPLATE/6_question.yml (64 lines changed; vendored)
@@ -2,6 +2,19 @@ name: Ask question
 description: Ask yt-dlp related question
 labels: [question]
 body:
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
+          required: true
+  - type: markdown
+    attributes:
+      value: |
+        ### Make sure you are **only** asking a question and not reporting a bug or requesting a feature.
+        If your question contains "isn't working" or "can you add", this is most likely the wrong template.
+        If you are in doubt whether this is the right template, **USE ANOTHER TEMPLATE**!
   - type: checkboxes
     id: checklist
     attributes:
@@ -13,43 +26,48 @@ body:
           required: true
         - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
           required: true
-        - label: I've verified that I'm running yt-dlp version **2022.06.29** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I'm running yt-dlp version **2023.10.07** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
   - type: textarea
     id: question
     attributes:
-      label: Question
-      description: |
-        Ask your question in an arbitrary form.
-        Please make sure it's worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-        Provide any additional information and as much context and examples as possible.
-        If your question contains "isn't working" or "can you add", this is most likely the wrong template.
-        If you are in doubt if this is the right template, use another template!
-      placeholder: WRITE QUESTION HERE
+      label: Please make sure the question is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information and as much context and examples as possible
     validations:
       required: true
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
   - type: textarea
     id: log
     attributes:
-      label: Verbose log
+      label: Complete Verbose Output
      description: |
-        If your question involves a yt-dlp command, provide the complete verbose output of that command.
-        Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
+        It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2021.12.01 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+        [debug] Command-line config: ['-vU', 'test:youtube']
+        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version 2023.10.07 [9d339c4] (win32_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] Checking exe version: ffmpeg -bsfs
+        [debug] Checking exe version: ffprobe -bsfs
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
-        yt-dlp is up to date (2021.12.01)
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: 2023.10.07, Current version: 2023.10.07
+        yt-dlp is up to date (2023.10.07)
         <more lines>
       render: shell
.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml (47 lines changed; vendored)
@@ -1,7 +1,8 @@
-name: Broken site
-description: Report broken or misfunctioning site
+name: Broken site support
+description: Report issue with yt-dlp on a supported site
 labels: [triage, site-bug]
 body:
+  %(no_skip)s
   - type: checkboxes
     id: checklist
     attributes:
@@ -9,15 +10,15 @@ body:
       description: |
        Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
      options:
-        - label: I'm reporting a broken site
+        - label: I'm reporting that yt-dlp is broken on a **supported** site
          required: true
        - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
          required: true
        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
          required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
          required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
          required: true
        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
          required: true
@@ -26,38 +27,14 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your issue in an arbitrary form.
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
-    validations:
-      required: true
-  - type: textarea
-    id: log
-    attributes:
-      label: Verbose log
-      description: |
-        Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
-        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
-      placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version %(version)s (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (%(version)s)
-        <more lines>
-      render: shell
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  %(verbose)s
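A note on the `%(no_skip)s`, `%(version)s`, `%(verbose)s` and (later) `%(verbose_optional)s` tokens in the `_tmpl` diffs above and below: these are printf-style placeholders. The files under `.github/ISSUE_TEMPLATE_tmpl/` are not used directly; they are expanded into the concrete templates under `.github/ISSUE_TEMPLATE/` during release preparation (the `make issuetemplates` step visible in the old workflow further down). A minimal sketch of that expansion follows; the snippet bodies are illustrative stand-ins, not the repository's actual devscript:

# Minimal sketch (an assumption, not the actual devscript): expand the
# printf-style %(...)s placeholders of an ISSUE_TEMPLATE_tmpl file into
# a concrete issue template, as `make issuetemplates` does.
fields = {
    'version': '2023.10.07',  # stamped into the "I've verified..." checkbox
    'no_skip': '- type: checkboxes ...',           # stand-in for the DO-NOT-SKIP block
    'verbose': '- type: checkboxes ...',           # stand-in for the required verbose-output block
    'verbose_optional': '- type: checkboxes ...',  # stand-in for the optional variant
}

with open('.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml') as src:
    expanded = src.read() % fields  # plain %-mapping substitution

with open('.github/ISSUE_TEMPLATE/1_broken_site.yml', 'w') as dst:
    dst.write(expanded)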
.github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml (vendored)
@@ -2,6 +2,7 @@ name: Site support request
 description: Request support for a new site
 labels: [triage, site-request]
 body:
+  %(no_skip)s
   - type: checkboxes
     id: checklist
     attributes:
@@ -15,9 +16,9 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+        - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -26,8 +27,8 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: example-urls
     attributes:
@@ -43,32 +44,9 @@ body:
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide any additional information
-      placeholder: WRITE DESCRIPTION HERE
-    validations:
-      required: true
-  - type: textarea
-    id: log
-    attributes:
-      label: Verbose log
-      description: |
-        Provide the complete verbose output **using one of the example URLs provided above**.
-        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
-      placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version %(version)s (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (%(version)s)
-        <more lines>
-      render: shell
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  %(verbose)s
.github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml (vendored)
@@ -2,6 +2,7 @@ name: Site feature request
 description: Request a new functionality for a supported site
 labels: [triage, site-enhancement]
 body:
+  %(no_skip)s
   - type: checkboxes
     id: checklist
     attributes:
@@ -15,7 +16,7 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
@@ -24,8 +25,8 @@ body:
     id: region
     attributes:
       label: Region
-      description: "Enter the region the site is accessible from"
-      placeholder: "India"
+      description: Enter the country/region that the site is accessible from
+      placeholder: India
   - type: textarea
     id: example-urls
     attributes:
@@ -39,34 +40,9 @@ body:
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your site feature request in an arbitrary form.
-        Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
-    validations:
-      required: true
-  - type: textarea
-    id: log
-    attributes:
-      label: Verbose log
-      description: |
-        Provide the complete verbose output of yt-dlp that demonstrates the need for the enhancement.
-        Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
-      placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version %(version)s (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (%(version)s)
-        <more lines>
-      render: shell
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  %(verbose)s
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (40 lines changed; vendored)
@@ -1,7 +1,8 @@
-name: Bug report
+name: Core bug report
 description: Report a bug unrelated to any particular site or extractor
 labels: [triage, bug]
 body:
+  %(no_skip)s
   - type: checkboxes
     id: checklist
     attributes:
@@ -15,43 +16,18 @@ body:
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your issue in an arbitrary form.
-        Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
-    validations:
-      required: true
-  - type: textarea
-    id: log
-    attributes:
-      label: Verbose log
-      description: |
-        Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
-        Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
-      placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version %(version)s (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (%(version)s)
-        <more lines>
-      render: shell
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
+  %(verbose)s
.github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml (vendored)
@@ -2,6 +2,7 @@ name: Feature request
 description: Request a new functionality unrelated to any particular site or extractor
 labels: [triage, enhancement]
 body:
+  %(no_skip)s
   - type: checkboxes
     id: checklist
     attributes:
@@ -15,39 +16,16 @@ body:
           required: true
         - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
   - type: textarea
     id: description
     attributes:
-      label: Description
-      description: |
-        Provide an explanation of your site feature request in an arbitrary form.
-        Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-        Provide any additional information, any suggested solutions, and as much context and examples as possible
-      placeholder: WRITE DESCRIPTION HERE
+      label: Provide a description that is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
     validations:
       required: true
-  - type: textarea
-    id: log
-    attributes:
-      label: Verbose log
-      description: |
-        If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
-        Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
-      placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2021.12.01 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (2021.12.01)
-        <more lines>
-      render: shell
+  %(verbose_optional)s
.github/ISSUE_TEMPLATE_tmpl/6_question.yml (42 lines changed; vendored)
@@ -2,6 +2,13 @@ name: Ask question
 description: Ask yt-dlp related question
 labels: [question]
 body:
+  %(no_skip)s
+  - type: markdown
+    attributes:
+      value: |
+        ### Make sure you are **only** asking a question and not reporting a bug or requesting a feature.
+        If your question contains "isn't working" or "can you add", this is most likely the wrong template.
+        If you are in doubt whether this is the right template, **USE ANOTHER TEMPLATE**!
   - type: checkboxes
     id: checklist
     attributes:
@@ -15,41 +22,16 @@ body:
           required: true
         - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
           required: true
-        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones. DO NOT post duplicates
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
           required: true
   - type: textarea
     id: question
     attributes:
-      label: Question
-      description: |
-        Ask your question in an arbitrary form.
-        Please make sure it's worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
-        Provide any additional information and as much context and examples as possible.
-        If your question contains "isn't working" or "can you add", this is most likely the wrong template.
-        If you are in doubt if this is the right template, use another template!
-      placeholder: WRITE QUESTION HERE
+      label: Please make sure the question is worded well enough to be understood
+      description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+      placeholder: Provide any additional information and as much context and examples as possible
     validations:
       required: true
-  - type: textarea
-    id: log
-    attributes:
-      label: Verbose log
-      description: |
-        If your question involves a yt-dlp command, provide the complete verbose output of that command.
-        Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
-        It should look similar to this:
-      placeholder: |
-        [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
-        [debug] Portable config file: yt-dlp.conf
-        [debug] Portable config: ['-i']
-        [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
-        [debug] yt-dlp version 2021.12.01 (exe)
-        [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
-        [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
-        [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
-        [debug] Proxy map: {}
-        yt-dlp is up to date (2021.12.01)
-        <more lines>
-      render: shell
+  %(verbose_optional)s
.github/PULL_REQUEST_TEMPLATE.md (30 lines changed; vendored)
@@ -1,5 +1,23 @@
+**IMPORTANT**: PRs without the template will be CLOSED
+
+### Description of your *pull request* and other information
+
 <!--
-# Please follow the guide below
+
+Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible
+
+-->
+
+ADD DESCRIPTION HERE
+
+Fixes #
+
+
+<details open><summary>Template</summary> <!-- OPEN is intentional -->
+
+<!--
+
+# PLEASE FOLLOW THE GUIDE BELOW
+
 - You will be asked some questions, please read them **carefully** and answer honestly
 - Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
@@ -12,7 +30,7 @@
 - [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests
 - [ ] Checked the code with [flake8](https://pypi.python.org/pypi/flake8) and [ran relevant tests](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions)

-### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check one of the following options:
+### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply:
 - [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
 - [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)

@@ -22,8 +40,10 @@
 - [ ] Core bug fix/improvement
 - [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes))

----
-
-### Description of your *pull request* and other information
+<!-- Do NOT edit/remove anything below this! -->
+</details><details><summary>Copilot Summary</summary>

-Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible.
+copilot:all

+</details>
.github/workflows/build.yml (767 lines changed; vendored)
@@ -1,471 +1,428 @@
-name: Build
-on: workflow_dispatch
+name: Build Artifacts
+on:
+  workflow_call:
+    inputs:
+      version:
+        required: true
+        type: string
+      channel:
+        required: false
+        default: stable
+        type: string
+      unix:
+        default: true
+        type: boolean
+      linux_arm:
+        default: true
+        type: boolean
+      macos:
+        default: true
+        type: boolean
+      macos_legacy:
+        default: true
+        type: boolean
+      windows:
+        default: true
+        type: boolean
+      windows32:
+        default: true
+        type: boolean
+      meta_files:
+        default: true
+        type: boolean
+    secrets:
+      GPG_SIGNING_KEY:
+        required: false
+
+  workflow_dispatch:
+    inputs:
+      version:
+        description: Version tag (YYYY.MM.DD[.REV])
+        required: true
+        type: string
+      channel:
+        description: Update channel (stable/nightly/...)
+        required: true
+        default: stable
+        type: string
+      unix:
+        description: yt-dlp, yt-dlp.tar.gz, yt-dlp_linux, yt-dlp_linux.zip
+        default: true
+        type: boolean
+      linux_arm:
+        description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
+        default: true
+        type: boolean
+      macos:
+        description: yt-dlp_macos, yt-dlp_macos.zip
+        default: true
+        type: boolean
+      macos_legacy:
+        description: yt-dlp_macos_legacy
+        default: true
+        type: boolean
+      windows:
+        description: yt-dlp.exe, yt-dlp_min.exe, yt-dlp_win.zip
+        default: true
+        type: boolean
+      windows32:
+        description: yt-dlp_x86.exe
+        default: true
+        type: boolean
+      meta_files:
+        description: SHA2-256SUMS, SHA2-512SUMS, _update_spec
+        default: true
+        type: boolean
+
+permissions:
+  contents: read
+
jobs:
|
jobs:
|
||||||
create_release:
|
unix:
|
||||||
|
if: inputs.unix
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
|
||||||
version_suffix: ${{ steps.version_suffix.outputs.version_suffix }}
|
|
||||||
ytdlp_version: ${{ steps.bump_version.outputs.ytdlp_version }}
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
release_id: ${{ steps.create_release.outputs.id }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
with:
|
- uses: actions/setup-python@v4
|
||||||
fetch-depth: 0
|
with:
|
||||||
- uses: actions/setup-python@v2
|
python-version: "3.10"
|
||||||
with:
|
- uses: conda-incubator/setup-miniconda@v2
|
||||||
python-version: '3.10'
|
with:
|
||||||
|
miniforge-variant: Mambaforge
|
||||||
|
use-mamba: true
|
||||||
|
channels: conda-forge
|
||||||
|
auto-update-conda: true
|
||||||
|
activate-environment: ""
|
||||||
|
auto-activate-base: false
|
||||||
|
- name: Install Requirements
|
||||||
|
run: |
|
||||||
|
sudo apt-get -y install zip pandoc man sed
|
||||||
|
python -m pip install -U pip setuptools wheel
|
||||||
|
python -m pip install -U Pyinstaller -r requirements.txt
|
||||||
|
reqs=$(mktemp)
|
||||||
|
cat > $reqs << EOF
|
||||||
|
python=3.10.*
|
||||||
|
pyinstaller
|
||||||
|
cffi
|
||||||
|
brotli-python
|
||||||
|
EOF
|
||||||
|
sed '/^brotli.*/d' requirements.txt >> $reqs
|
||||||
|
mamba create -n build --file $reqs
|
||||||
|
|
||||||
- name: Set version suffix
|
- name: Prepare
|
||||||
id: version_suffix
|
run: |
|
||||||
env:
|
python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
||||||
PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
|
|
||||||
if: "env.PUSH_VERSION_COMMIT == ''"
|
|
||||||
run: echo ::set-output name=version_suffix::$(date -u +"%H%M%S")
|
|
||||||
- name: Bump version
|
|
||||||
id: bump_version
|
|
||||||
run: |
|
|
||||||
python devscripts/update-version.py ${{ steps.version_suffix.outputs.version_suffix }}
|
|
||||||
make issuetemplates
|
|
||||||
|
|
||||||
- name: Push to release
|
|
||||||
id: push_release
|
|
||||||
run: |
|
|
||||||
git config --global user.name github-actions
|
|
||||||
git config --global user.email github-actions@example.com
|
|
||||||
git add -u
|
|
||||||
git commit -m "[version] update" -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
|
|
||||||
git push origin --force ${{ github.event.ref }}:release
|
|
||||||
echo ::set-output name=head_sha::$(git rev-parse HEAD)
|
|
||||||
- name: Update master
|
|
||||||
env:
|
|
||||||
PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
|
|
||||||
if: "env.PUSH_VERSION_COMMIT != ''"
|
|
||||||
run: git push origin ${{ github.event.ref }}
|
|
||||||
- name: Get Changelog
|
|
||||||
run: |
|
|
||||||
changelog=$(grep -oPz '(?s)(?<=### ${{ steps.bump_version.outputs.ytdlp_version }}\n{2}).+?(?=\n{2,3}###)' Changelog.md) || true
|
|
||||||
echo "changelog<<EOF" >> $GITHUB_ENV
|
|
||||||
echo "$changelog" >> $GITHUB_ENV
|
|
||||||
echo "EOF" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Create Release
|
|
||||||
id: create_release
|
|
||||||
uses: actions/create-release@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
tag_name: ${{ steps.bump_version.outputs.ytdlp_version }}
|
|
||||||
release_name: yt-dlp ${{ steps.bump_version.outputs.ytdlp_version }}
|
|
||||||
commitish: ${{ steps.push_release.outputs.head_sha }}
|
|
||||||
draft: true
|
|
||||||
prerelease: false
|
|
||||||
body: |
|
|
||||||
#### [A description of the various files]((https://github.com/yt-dlp/yt-dlp#release-files)) are in the README
|
|
||||||
|
|
||||||
---
|
|
||||||
<details open><summary><h3>Changelog</summary>
|
|
||||||
<p>
|
|
||||||
|
|
||||||
${{ env.changelog }}
|
|
||||||
|
|
||||||
</p>
|
|
||||||
</details>
|
|
||||||
|
|
||||||
|
|
||||||
build_unix:
|
|
||||||
needs: create_release
|
|
||||||
runs-on: ubuntu-18.04 # Standalone executable should be built on minimum supported OS
|
|
||||||
outputs:
|
|
||||||
sha256_bin: ${{ steps.get_sha.outputs.sha256_bin }}
|
|
||||||
sha512_bin: ${{ steps.get_sha.outputs.sha512_bin }}
|
|
||||||
sha256_tar: ${{ steps.get_sha.outputs.sha256_tar }}
|
|
||||||
sha512_tar: ${{ steps.get_sha.outputs.sha512_tar }}
|
|
||||||
sha256_linux: ${{ steps.get_sha.outputs.sha256_linux }}
|
|
||||||
sha512_linux: ${{ steps.get_sha.outputs.sha512_linux }}
|
|
||||||
sha256_linux_zip: ${{ steps.get_sha.outputs.sha256_linux_zip }}
|
|
||||||
sha512_linux_zip: ${{ steps.get_sha.outputs.sha512_linux_zip }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.10'
|
|
||||||
- name: Install Requirements
|
|
||||||
run: |
|
|
||||||
sudo apt-get -y install zip pandoc man
|
|
||||||
python -m pip install --upgrade pip setuptools wheel twine
|
|
||||||
python -m pip install Pyinstaller -r requirements.txt
|
|
||||||
|
|
||||||
- name: Prepare
|
|
||||||
run: |
|
|
||||||
python devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
|
|
||||||
python devscripts/make_lazy_extractors.py
|
python devscripts/make_lazy_extractors.py
|
||||||
- name: Build Unix executables
|
- name: Build Unix platform-independent binary
|
||||||
run: |
|
run: |
|
||||||
make all tar
|
make all tar
|
||||||
|
- name: Build Unix standalone binary
|
||||||
|
shell: bash -l {0}
|
||||||
|
run: |
|
||||||
|
unset LD_LIBRARY_PATH # Harmful; set by setup-python
|
||||||
|
conda activate build
|
||||||
python pyinst.py --onedir
|
python pyinst.py --onedir
|
||||||
(cd ./dist/yt-dlp_linux && zip -r ../yt-dlp_linux.zip .)
|
(cd ./dist/yt-dlp_linux && zip -r ../yt-dlp_linux.zip .)
|
||||||
python pyinst.py
|
python pyinst.py
|
||||||
- name: Get SHA2-SUMS
|
mv ./dist/yt-dlp_linux ./yt-dlp_linux
|
||||||
id: get_sha
|
mv ./dist/yt-dlp_linux.zip ./yt-dlp_linux.zip
|
||||||
run: |
|
|
||||||
echo "::set-output name=sha256_bin::$(sha256sum yt-dlp | awk '{print $1}')"
|
|
||||||
echo "::set-output name=sha512_bin::$(sha512sum yt-dlp | awk '{print $1}')"
|
|
||||||
echo "::set-output name=sha256_tar::$(sha256sum yt-dlp.tar.gz | awk '{print $1}')"
|
|
||||||
echo "::set-output name=sha512_tar::$(sha512sum yt-dlp.tar.gz | awk '{print $1}')"
|
|
||||||
echo "::set-output name=sha256_linux::$(sha256sum dist/yt-dlp_linux | awk '{print $1}')"
|
|
||||||
echo "::set-output name=sha512_linux::$(sha512sum dist/yt-dlp_linux | awk '{print $1}')"
|
|
||||||
echo "::set-output name=sha256_linux_zip::$(sha256sum dist/yt-dlp_linux.zip | awk '{print $1}')"
|
|
||||||
echo "::set-output name=sha512_linux_zip::$(sha512sum dist/yt-dlp_linux.zip | awk '{print $1}')"
|
|
||||||
|
|
||||||
- name: Upload zip binary
|
- name: Verify --update-to
|
||||||
uses: actions/upload-release-asset@v1
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
env:
|
run: |
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
binaries=("yt-dlp" "yt-dlp_linux")
|
||||||
with:
|
for binary in "${binaries[@]}"; do
|
||||||
upload_url: ${{ needs.create_release.outputs.upload_url }}
|
chmod +x ./${binary}
|
||||||
asset_path: ./yt-dlp
|
cp ./${binary} ./${binary}_downgraded
|
||||||
asset_name: yt-dlp
|
version="$(./${binary} --version)"
|
||||||
asset_content_type: application/octet-stream
|
./${binary}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
- name: Upload Source tar
|
downgraded_version="$(./${binary}_downgraded --version)"
|
||||||
uses: actions/upload-release-asset@v1
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
env:
|
done
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./yt-dlp.tar.gz
|
|
||||||
asset_name: yt-dlp.tar.gz
|
|
||||||
asset_content_type: application/gzip
|
|
||||||
- name: Upload standalone binary
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./dist/yt-dlp_linux
|
|
||||||
asset_name: yt-dlp_linux
|
|
||||||
asset_content_type: application/octet-stream
|
|
||||||
- name: Upload onedir binary
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.create_release.outputs.upload_url }}
|
|
||||||
asset_path: ./dist/yt-dlp_linux.zip
|
|
||||||
asset_name: yt-dlp_linux.zip
|
|
||||||
asset_content_type: application/zip
|
|
||||||
|
|
||||||
- name: Build and publish on PyPi
|
- name: Upload artifacts
|
||||||
env:
|
uses: actions/upload-artifact@v3
|
||||||
TWINE_USERNAME: __token__
|
with:
|
||||||
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
|
path: |
|
||||||
if: "env.TWINE_PASSWORD != ''"
|
yt-dlp
|
||||||
run: |
|
yt-dlp.tar.gz
|
||||||
rm -rf dist/*
|
yt-dlp_linux
|
||||||
python setup.py sdist bdist_wheel
|
yt-dlp_linux.zip
|
||||||
twine upload dist/*
|
|
||||||
|
|
||||||
- name: Install SSH private key for Homebrew
|
linux_arm:
|
||||||
env:
|
if: inputs.linux_arm
|
||||||
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
|
permissions:
|
||||||
if: "env.BREW_TOKEN != ''"
|
contents: read
|
||||||
uses: yt-dlp/ssh-agent@v0.5.3
|
packages: write # for creating cache
|
||||||
with:
|
runs-on: ubuntu-latest
|
||||||
ssh-private-key: ${{ env.BREW_TOKEN }}
|
strategy:
|
||||||
- name: Update Homebrew Formulae
|
matrix:
|
||||||
env:
|
architecture:
|
||||||
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
|
- armv7
|
||||||
if: "env.BREW_TOKEN != ''"
|
- aarch64
|
||||||
run: |
|
|
||||||
git clone git@github.com:yt-dlp/homebrew-taps taps/
|
|
||||||
python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.create_release.outputs.ytdlp_version }}"
|
|
||||||
git -C taps/ config user.name github-actions
|
|
||||||
git -C taps/ config user.email github-actions@example.com
|
|
||||||
git -C taps/ commit -am 'yt-dlp: ${{ needs.create_release.outputs.ytdlp_version }}'
|
|
||||||
git -C taps/ push
|
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
path: ./repo
|
||||||
|
- name: Virtualized Install, Prepare & Build
|
||||||
|
uses: yt-dlp/run-on-arch-action@v2
|
||||||
|
with:
|
||||||
|
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
|
||||||
|
env: |
|
||||||
|
GITHUB_WORKFLOW: build
|
||||||
|
githubToken: ${{ github.token }} # To cache image
|
||||||
|
arch: ${{ matrix.architecture }}
|
||||||
|
distro: ubuntu18.04 # Standalone executable should be built on minimum supported OS
|
||||||
|
dockerRunArgs: --volume "${PWD}/repo:/repo"
|
||||||
|
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
|
||||||
|
apt update
|
||||||
|
apt -y install zlib1g-dev python3.8 python3.8-dev python3.8-distutils python3-pip
|
||||||
|
python3.8 -m pip install -U pip setuptools wheel
|
||||||
|
# Cannot access requirements.txt from the repo directory at this stage
|
||||||
|
python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi
|
||||||
|
|
||||||
build_macos:
|
run: |
|
||||||
|
cd repo
|
||||||
|
python3.8 -m pip install -U Pyinstaller -r requirements.txt # Cached version may be out of date
|
||||||
|
python3.8 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
||||||
|
python3.8 devscripts/make_lazy_extractors.py
|
||||||
|
python3.8 pyinst.py
|
||||||
|
|
||||||
|
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
|
||||||
|
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
|
||||||
|
chmod +x ./dist/yt-dlp_linux_${arch}
|
||||||
|
cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
|
||||||
|
version="$(./dist/yt-dlp_linux_${arch} --version)"
|
||||||
|
./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
|
downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
|
||||||
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
path: | # run-on-arch-action designates armv7l as armv7
|
||||||
|
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
|
||||||
|
|
||||||
|
+  macos:
+    if: inputs.macos
     runs-on: macos-11
-    needs: create_release
-    outputs:
-      sha256_macos: ${{ steps.get_sha.outputs.sha256_macos }}
-      sha512_macos: ${{ steps.get_sha.outputs.sha512_macos }}
-      sha256_macos_zip: ${{ steps.get_sha.outputs.sha256_macos_zip }}
-      sha512_macos_zip: ${{ steps.get_sha.outputs.sha512_macos_zip }}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      # NB: In order to create a universal2 application, the version of python3 in /usr/bin has to be used
+      # NB: Building universal2 does not work with python from actions/setup-python
       - name: Install Requirements
         run: |
           brew install coreutils
-          /usr/bin/python3 -m pip install -U --user pip Pyinstaller -r requirements.txt
+          python3 -m pip install -U --user pip setuptools wheel
+          # We need to ignore wheels otherwise we break universal2 builds
+          python3 -m pip install -U --user --no-binary :all: Pyinstaller -r requirements.txt

       - name: Prepare
         run: |
-          /usr/bin/python3 devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
+          python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
-          /usr/bin/python3 devscripts/make_lazy_extractors.py
+          python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
-          /usr/bin/python3 pyinst.py --target-architecture universal2 --onedir
+          python3 pyinst.py --target-architecture universal2 --onedir
           (cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
-          /usr/bin/python3 pyinst.py --target-architecture universal2
+          python3 pyinst.py --target-architecture universal2
-      - name: Get SHA2-SUMS
-        id: get_sha
-        run: |
-          echo "::set-output name=sha256_macos::$(sha256sum dist/yt-dlp_macos | awk '{print $1}')"
-          echo "::set-output name=sha512_macos::$(sha512sum dist/yt-dlp_macos | awk '{print $1}')"
-          echo "::set-output name=sha256_macos_zip::$(sha256sum dist/yt-dlp_macos.zip | awk '{print $1}')"
-          echo "::set-output name=sha512_macos_zip::$(sha512sum dist/yt-dlp_macos.zip | awk '{print $1}')"

-      - name: Upload standalone binary
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./dist/yt-dlp_macos
-          asset_name: yt-dlp_macos
-          asset_content_type: application/octet-stream
-      - name: Upload onedir binary
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./dist/yt-dlp_macos.zip
-          asset_name: yt-dlp_macos.zip
-          asset_content_type: application/zip
+      - name: Verify --update-to
+        if: vars.UPDATE_TO_VERIFICATION
+        run: |
+          chmod +x ./dist/yt-dlp_macos
+          cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
+          version="$(./dist/yt-dlp_macos --version)"
+          ./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+          downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
+          [[ "$version" != "$downgraded_version" ]]

+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          path: |
+            dist/yt-dlp_macos
+            dist/yt-dlp_macos.zip
-  build_macos_legacy:
+  macos_legacy:
+    if: inputs.macos_legacy
     runs-on: macos-latest
-    needs: create_release
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Install Python
         # We need the official Python, because the GA ones only support newer macOS versions
         env:
           PYTHON_VERSION: 3.10.5
           MACOSX_DEPLOYMENT_TARGET: 10.9 # Used up by the Python build tools
         run: |
           # Hack to get the latest patch version. Uncomment if needed
           #brew install python@3.10
           #export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
           curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o "python.pkg"
           sudo installer -pkg python.pkg -target /
           python3 --version
       - name: Install Requirements
         run: |
           brew install coreutils
-          python3 -m pip install -U --user pip Pyinstaller -r requirements.txt
+          python3 -m pip install -U --user pip setuptools wheel
+          python3 -m pip install -U --user Pyinstaller -r requirements.txt

       - name: Prepare
         run: |
-          python3 devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
+          python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
           python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
           python3 pyinst.py
+          mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
-      - name: Get SHA2-SUMS
-        id: get_sha
-        run: |
-          echo "::set-output name=sha256_macos_legacy::$(sha256sum dist/yt-dlp_macos | awk '{print $1}')"
-          echo "::set-output name=sha512_macos_legacy::$(sha512sum dist/yt-dlp_macos | awk '{print $1}')"

-      - name: Upload standalone binary
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./dist/yt-dlp_macos
-          asset_name: yt-dlp_macos_legacy
-          asset_content_type: application/octet-stream
+      - name: Verify --update-to
+        if: vars.UPDATE_TO_VERIFICATION
+        run: |
+          chmod +x ./dist/yt-dlp_macos_legacy
+          cp ./dist/yt-dlp_macos_legacy ./dist/yt-dlp_macos_legacy_downgraded
+          version="$(./dist/yt-dlp_macos_legacy --version)"
+          ./dist/yt-dlp_macos_legacy_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+          downgraded_version="$(./dist/yt-dlp_macos_legacy_downgraded --version)"
+          [[ "$version" != "$downgraded_version" ]]

+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          path: |
+            dist/yt-dlp_macos_legacy
-  build_windows:
+  windows:
+    if: inputs.windows
     runs-on: windows-latest
-    needs: create_release
-    outputs:
-      sha256_win: ${{ steps.get_sha.outputs.sha256_win }}
-      sha512_win: ${{ steps.get_sha.outputs.sha512_win }}
-      sha256_py2exe: ${{ steps.get_sha.outputs.sha256_py2exe }}
-      sha512_py2exe: ${{ steps.get_sha.outputs.sha512_py2exe }}
-      sha256_win_zip: ${{ steps.get_sha.outputs.sha256_win_zip }}
-      sha512_win_zip: ${{ steps.get_sha.outputs.sha512_win_zip }}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
        with: # 3.8 is used for Win7 support
-          python-version: '3.8'
+          python-version: "3.8"
       - name: Install Requirements
         run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
-          python -m pip install --upgrade pip setuptools wheel py2exe
+          python -m pip install -U pip setuptools wheel py2exe
-          pip install "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
+          pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt

       - name: Prepare
         run: |
-          python devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
+          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
           python devscripts/make_lazy_extractors.py
       - name: Build
         run: |
           python setup.py py2exe
           Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
           python pyinst.py
           python pyinst.py --onedir
           Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
-      - name: Get SHA2-SUMS
-        id: get_sha
-        run: |
-          echo "::set-output name=sha256_py2exe::$((Get-FileHash dist\yt-dlp_min.exe -Algorithm SHA256).Hash.ToLower())"
-          echo "::set-output name=sha512_py2exe::$((Get-FileHash dist\yt-dlp_min.exe -Algorithm SHA512).Hash.ToLower())"
-          echo "::set-output name=sha256_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
-          echo "::set-output name=sha512_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
-          echo "::set-output name=sha256_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA256).Hash.ToLower())"
-          echo "::set-output name=sha512_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA512).Hash.ToLower())"

-      - name: Upload py2exe binary
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./dist/yt-dlp_min.exe
-          asset_name: yt-dlp_min.exe
-          asset_content_type: application/vnd.microsoft.portable-executable
-      - name: Upload standalone binary
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./dist/yt-dlp.exe
-          asset_name: yt-dlp.exe
-          asset_content_type: application/vnd.microsoft.portable-executable
-      - name: Upload onedir binary
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./dist/yt-dlp_win.zip
-          asset_name: yt-dlp_win.zip
-          asset_content_type: application/zip
+      - name: Verify --update-to
+        if: vars.UPDATE_TO_VERIFICATION
+        run: |
+          foreach ($name in @("yt-dlp","yt-dlp_min")) {
+            Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
+            $version = & "./dist/${name}.exe" --version
+            & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
+            $downgraded_version = & "./dist/${name}_downgraded.exe" --version
+            if ($version -eq $downgraded_version) {
+              exit 1
+            }
+          }

+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          path: |
+            dist/yt-dlp.exe
+            dist/yt-dlp_min.exe
+            dist/yt-dlp_win.zip
-  build_windows32:
+  windows32:
+    if: inputs.windows32
     runs-on: windows-latest
-    needs: create_release
-    outputs:
-      sha256_win32: ${{ steps.get_sha.outputs.sha256_win32 }}
-      sha512_win32: ${{ steps.get_sha.outputs.sha512_win32 }}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
        with: # 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
-          python-version: '3.7'
+          python-version: "3.7"
-          architecture: 'x86'
+          architecture: "x86"
       - name: Install Requirements
         run: |
-          python -m pip install --upgrade pip setuptools wheel
+          python -m pip install -U pip setuptools wheel
-          pip install "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
+          pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt

       - name: Prepare
         run: |
-          python devscripts/update-version.py ${{ needs.create_release.outputs.version_suffix }}
+          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
           python devscripts/make_lazy_extractors.py
       - name: Build
         run: |
           python pyinst.py
-      - name: Get SHA2-SUMS
-        id: get_sha
-        run: |
-          echo "::set-output name=sha256_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA256).Hash.ToLower())"
-          echo "::set-output name=sha512_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA512).Hash.ToLower())"

-      - name: Upload standalone binary
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./dist/yt-dlp_x86.exe
-          asset_name: yt-dlp_x86.exe
-          asset_content_type: application/vnd.microsoft.portable-executable
+      - name: Verify --update-to
+        if: vars.UPDATE_TO_VERIFICATION
+        run: |
+          foreach ($name in @("yt-dlp_x86")) {
+            Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
+            $version = & "./dist/${name}.exe" --version
+            & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
+            $downgraded_version = & "./dist/${name}_downgraded.exe" --version
+            if ($version -eq $downgraded_version) {
+              exit 1
+            }
+          }

+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          path: |
+            dist/yt-dlp_x86.exe
-  finish:
+  meta_files:
+    if: inputs.meta_files && always() && !cancelled()
+    needs:
+      - unix
+      - linux_arm
+      - macos
+      - macos_legacy
+      - windows
+      - windows32
     runs-on: ubuntu-latest
-    needs: [create_release, build_unix, build_windows, build_windows32, build_macos, build_macos_legacy]

     steps:
+      - uses: actions/download-artifact@v3
-      - name: Make SHA2-SUMS files
-        run: |
-          echo "${{ needs.build_unix.outputs.sha256_bin }} yt-dlp" >> SHA2-256SUMS
-          echo "${{ needs.build_unix.outputs.sha256_tar }} yt-dlp.tar.gz" >> SHA2-256SUMS
-          echo "${{ needs.build_unix.outputs.sha256_linux }} yt-dlp_linux" >> SHA2-256SUMS
-          echo "${{ needs.build_unix.outputs.sha256_linux_zip }} yt-dlp_linux.zip" >> SHA2-256SUMS
-          echo "${{ needs.build_windows.outputs.sha256_win }} yt-dlp.exe" >> SHA2-256SUMS
-          echo "${{ needs.build_windows.outputs.sha256_py2exe }} yt-dlp_min.exe" >> SHA2-256SUMS
-          echo "${{ needs.build_windows32.outputs.sha256_win32 }} yt-dlp_x86.exe" >> SHA2-256SUMS
-          echo "${{ needs.build_windows.outputs.sha256_win_zip }} yt-dlp_win.zip" >> SHA2-256SUMS
-          echo "${{ needs.build_macos.outputs.sha256_macos }} yt-dlp_macos" >> SHA2-256SUMS
-          echo "${{ needs.build_macos.outputs.sha256_macos_zip }} yt-dlp_macos.zip" >> SHA2-256SUMS
-          echo "${{ needs.build_macos_legacy.outputs.sha256_macos_legacy }} yt-dlp_macos_legacy" >> SHA2-256SUMS
-          echo "${{ needs.build_unix.outputs.sha512_bin }} yt-dlp" >> SHA2-512SUMS
-          echo "${{ needs.build_unix.outputs.sha512_tar }} yt-dlp.tar.gz" >> SHA2-512SUMS
-          echo "${{ needs.build_unix.outputs.sha512_linux }} yt-dlp_linux" >> SHA2-512SUMS
-          echo "${{ needs.build_unix.outputs.sha512_linux_zip }} yt-dlp_linux.zip" >> SHA2-512SUMS
-          echo "${{ needs.build_windows.outputs.sha512_win }} yt-dlp.exe" >> SHA2-512SUMS
-          echo "${{ needs.build_windows.outputs.sha512_py2exe }} yt-dlp_min.exe" >> SHA2-512SUMS
-          echo "${{ needs.build_windows32.outputs.sha512_win32 }} yt-dlp_x86.exe" >> SHA2-512SUMS
-          echo "${{ needs.build_windows.outputs.sha512_win_zip }} yt-dlp_win.zip" >> SHA2-512SUMS
-          echo "${{ needs.build_macos.outputs.sha512_macos }} yt-dlp_macos" >> SHA2-512SUMS
-          echo "${{ needs.build_macos.outputs.sha512_macos_zip }} yt-dlp_macos.zip" >> SHA2-512SUMS
-          echo "${{ needs.build_macos_legacy.outputs.sha512_macos_legacy }} yt-dlp_macos_legacy" >> SHA2-512SUMS

-      - name: Upload SHA2-256SUMS file
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./SHA2-256SUMS
-          asset_name: SHA2-256SUMS
-          asset_content_type: text/plain
-      - name: Upload SHA2-512SUMS file
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./SHA2-512SUMS
-          asset_name: SHA2-512SUMS
-          asset_content_type: text/plain
+      - name: Make SHA2-SUMS files
+        run: |
+          cd ./artifact/
+          sha256sum * > ../SHA2-256SUMS
+          sha512sum * > ../SHA2-512SUMS

       - name: Make Update spec
         run: |
-          echo "# This file is used for regulating self-update" >> _update_spec
+          cat >> _update_spec << EOF
+          # This file is used for regulating self-update
+          lock 2022.08.18.36 .+ Python 3.6
+          EOF
-      - name: Upload update spec
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ needs.create_release.outputs.upload_url }}
-          asset_path: ./_update_spec
-          asset_name: _update_spec
-          asset_content_type: text/plain
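The rewritten `Make Update spec` step swaps a single `echo` for a heredoc so the `lock` rule can be appended along with its comment. Read loosely (the exact matching rules live in the updater code, so treat this as an informal gloss), the added line tells the self-updater that any install matching `.+` that runs on Python 3.6 must not update past version 2022.08.18.36. A standalone sketch of the step:

```sh
# Same content the workflow writes; apparent format: lock <max-version> <pattern>
cat >> _update_spec << EOF
# This file is used for regulating self-update
lock 2022.08.18.36 .+ Python 3.6
EOF
```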
-      - name: Finalize release
+      - name: Sign checksum files
         env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
+        if: env.GPG_SIGNING_KEY != ''
-        run: |
-          gh api -X PATCH -H "Accept: application/vnd.github.v3+json" \
-            /repos/${{ github.repository }}/releases/${{ needs.create_release.outputs.release_id }} \
-            -F draft=false
+        run: |
+          gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
+          for signfile in ./SHA*SUMS; do
+            gpg --batch --detach-sign "$signfile"
+          done

+      - name: Upload artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          path: |
+            SHA*SUMS*
+            _update_spec
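When a `GPG_SIGNING_KEY` secret is configured, the new `Sign checksum files` step leaves a detached `SHA2-256SUMS.sig`/`SHA2-512SUMS.sig` beside each sums file. A sketch of how a release could then be checked locally, assuming the matching public key is already in your keyring and the downloaded files sit in the current directory:

```sh
# Verify the detached signature on the checksum file,
# then verify whichever release binaries were downloaded next to it
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
sha256sum --check --ignore-missing SHA2-256SUMS
```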
65  .github/workflows/codeql.yml  (vendored, new file)
@@ -0,0 +1,65 @@
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ 'master', 'gh-pages', 'release' ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [ 'master' ]
+  schedule:
+    - cron: '59 11 * * 5'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'python' ]
+        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+        # Use only 'java' to analyze code written in Java, Kotlin or both
+        # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
+        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v3
+
+    # Initializes the CodeQL tools for scanning.
+    - name: Initialize CodeQL
+      uses: github/codeql-action/init@v2
+      with:
+        languages: ${{ matrix.language }}
+        # If you wish to specify custom queries, you can do so here or in a config file.
+        # By default, queries listed here will override any specified in a config file.
+        # Prefix the list here with "+" to use these queries and those in the config file.
+
+        # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+        # queries: security-extended,security-and-quality
+
+    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+    # If this step fails, then you should remove it and run the build manually (see below)
+    - name: Autobuild
+      uses: github/codeql-action/autobuild@v2
+
+    # ℹ️ Command-line programs to run using the OS shell.
+    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+
+    # If the Autobuild fails above, remove it and uncomment the following three lines.
+    # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
+
+    # - run: |
+    #     echo "Run, Build Application using script"
+    #     ./location_of_script_within_repo/buildscript.sh
+
+    - name: Perform CodeQL Analysis
+      uses: github/codeql-action/analyze@v2
+      with:
+        category: "/language:${{matrix.language}}"
21  .github/workflows/core.yml  (vendored)
@@ -1,5 +1,8 @@
 name: Core Tests
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
   tests:
     name: Core Tests
@@ -9,26 +12,30 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        # CPython 3.9 is in quick-test
-        python-version: ['3.6', '3.7', '3.10', 3.11-dev, pypy-3.6, pypy-3.7, pypy-3.8]
+        # CPython 3.11 is in quick-test
+        python-version: ['3.8', '3.9', '3.10', '3.12', pypy-3.7, pypy-3.8, pypy-3.10]
         run-tests-ext: [sh]
         include:
           # at least one of each CPython/PyPy tests must be in windows
           - os: windows-latest
-            python-version: '3.8'
+            python-version: '3.7'
+            run-tests-ext: bat
+          - os: windows-latest
+            python-version: '3.12'
             run-tests-ext: bat
           - os: windows-latest
             python-version: pypy-3.9
             run-tests-ext: bat
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install pytest
        run: pip install pytest
      - name: Run tests
        continue-on-error: False
-        run: ./devscripts/run_tests.${{ matrix.run-tests-ext }} core
-  # Linter is in quick-test
+        run: |
+          python3 -m yt_dlp -v || true  # Print debug head
+          ./devscripts/run_tests.${{ matrix.run-tests-ext }} core
29  .github/workflows/download.yml  (vendored)
@@ -1,15 +1,34 @@
 name: Download Tests
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
-  tests:
-    name: Download Tests
+  quick:
+    name: Quick Download Tests
     if: "contains(github.event.head_commit.message, 'ci run dl')"
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.9
+      - name: Install test requirements
+        run: pip install pytest
+      - name: Run tests
+        continue-on-error: true
+        run: ./devscripts/run_tests.sh download
+
+  full:
+    name: Full Download Tests
+    if: "contains(github.event.head_commit.message, 'ci run dl all')"
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: true
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.6', '3.7', '3.9', '3.10', 3.11-dev, pypy-3.6, pypy-3.7, pypy-3.8]
+        python-version: ['3.7', '3.10', '3.12', pypy-3.7, pypy-3.8, pypy-3.10]
         run-tests-ext: [sh]
         include:
           # at least one of each CPython/PyPy tests must be in windows
@@ -20,9 +39,9 @@ jobs:
             python-version: pypy-3.9
             run-tests-ext: bat
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install pytest
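Both download jobs are gated on the commit message rather than running on every push. Since `ci run dl all` contains the substring `ci run dl`, a commit that requests the full matrix should satisfy the quick job's condition as well. For example:

```sh
# Opt in to download tests via the commit message (markers from the if: conditions above)
git commit -m 'FooIE: fix extraction

ci run dl'
# use "ci run dl all" in the message to request the full matrix instead
```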
97  .github/workflows/publish.yml  (vendored, new file)
@@ -0,0 +1,97 @@
+name: Publish
+on:
+  workflow_call:
+    inputs:
+      channel:
+        default: stable
+        required: true
+        type: string
+      version:
+        required: true
+        type: string
+      target_commitish:
+        required: true
+        type: string
+      prerelease:
+        default: false
+        required: true
+        type: boolean
+    secrets:
+      ARCHIVE_REPO_TOKEN:
+        required: false
+
+permissions:
+  contents: write
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - uses: actions/download-artifact@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Generate release notes
+        run: |
+          printf '%s' \
+            '[]' \
+            '(https://github.com/yt-dlp/yt-dlp#installation "Installation instructions") ' \
+            '[]' \
+            '(https://github.com/yt-dlp/yt-dlp/tree/2023.03.04#readme "Documentation") ' \
+            '[]' \
+            '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
+            '[]' \
+            '(https://discord.gg/H5MNcFW63r "Discord") ' \
+            ${{ inputs.channel != 'nightly' && '"[]" \
+            "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\")"' || '' }} \
+            > ./RELEASE_NOTES
+          printf '\n\n' >> ./RELEASE_NOTES
+          cat >> ./RELEASE_NOTES << EOF
+          #### A description of the various files are in the [README](https://github.com/yt-dlp/yt-dlp#release-files)
+          ---
+          $(python ./devscripts/make_changelog.py -vv --collapsible)
+          EOF
+          printf '%s\n\n' '**This is an automated nightly pre-release build**' >> ./NIGHTLY_NOTES
+          cat ./RELEASE_NOTES >> ./NIGHTLY_NOTES
+          printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ inputs.target_commitish }}' >> ./ARCHIVE_NOTES
+          cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
+
+      - name: Archive nightly release
+        env:
+          GH_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
+          GH_REPO: ${{ vars.ARCHIVE_REPO }}
+        if: |
+          inputs.channel == 'nightly' && env.GH_TOKEN != '' && env.GH_REPO != ''
+        run: |
+          gh release create \
+            --notes-file ARCHIVE_NOTES \
+            --title "yt-dlp nightly ${{ inputs.version }}" \
+            ${{ inputs.version }} \
+            artifact/*
+
+      - name: Prune old nightly release
+        if: inputs.channel == 'nightly' && !vars.ARCHIVE_REPO
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          gh release delete --yes --cleanup-tag "nightly" || true
+          git tag --delete "nightly" || true
+          sleep 5  # Enough time to cover deletion race condition
+
+      - name: Publish release${{ inputs.channel == 'nightly' && ' (nightly)' || '' }}
+        env:
+          GH_TOKEN: ${{ github.token }}
+        if: (inputs.channel == 'nightly' && !vars.ARCHIVE_REPO) || inputs.channel != 'nightly'
+        run: |
+          gh release create \
+            --notes-file ${{ inputs.channel == 'nightly' && 'NIGHTLY_NOTES' || 'RELEASE_NOTES' }} \
+            --target ${{ inputs.target_commitish }} \
+            --title "yt-dlp ${{ inputs.channel == 'nightly' && 'nightly ' || '' }}${{ inputs.version }}" \
+            ${{ inputs.prerelease && '--prerelease' || '' }} \
+            ${{ inputs.channel == 'nightly' && '"nightly"' || inputs.version }} \
+            artifact/*
22  .github/workflows/quick-test.yml  (vendored)
@@ -1,30 +1,32 @@
 name: Quick Test
 on: [push, pull_request]
+permissions:
+  contents: read
+
 jobs:
   tests:
     name: Core Test
     if: "!contains(github.event.head_commit.message, 'ci skip all')"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      - name: Set up Python
+      - name: Set up Python 3.11
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
        with:
-          python-version: 3.9
+          python-version: '3.11'
      - name: Install test requirements
        run: pip install pytest pycryptodomex
      - name: Run tests
-        run: ./devscripts/run_tests.sh core
+        run: |
+          python3 -m yt_dlp -v || true
+          ./devscripts/run_tests.sh core
  flake8:
    name: Linter
    if: "!contains(github.event.head_commit.message, 'ci skip all')"
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.9
+      - uses: actions/setup-python@v4
      - name: Install flake8
        run: pip install flake8
      - name: Make lazy extractors
52  .github/workflows/release-nightly.yml  (vendored, new file)
@@ -0,0 +1,52 @@
+name: Release (nightly)
+on:
+  push:
+    branches:
+      - master
+    paths:
+      - "yt_dlp/**.py"
+      - "!yt_dlp/version.py"
+concurrency:
+  group: release-nightly
+  cancel-in-progress: true
+permissions:
+  contents: read
+
+jobs:
+  prepare:
+    if: vars.BUILD_NIGHTLY != ''
+    runs-on: ubuntu-latest
+    outputs:
+      version: ${{ steps.get_version.outputs.version }}
+
+    steps:
+      - uses: actions/checkout@v3
+      - name: Get version
+        id: get_version
+        run: |
+          python devscripts/update-version.py "$(date -u +"%H%M%S")" | grep -Po "version=\d+(\.\d+){3}" >> "$GITHUB_OUTPUT"
+
+  build:
+    needs: prepare
+    uses: ./.github/workflows/build.yml
+    with:
+      version: ${{ needs.prepare.outputs.version }}
+      channel: nightly
+    permissions:
+      contents: read
+      packages: write # For package cache
+    secrets:
+      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
+
+  publish:
+    needs: [prepare, build]
+    uses: ./.github/workflows/publish.yml
+    secrets:
+      ARCHIVE_REPO_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
+    permissions:
+      contents: write
+    with:
+      channel: nightly
+      prerelease: true
+      version: ${{ needs.prepare.outputs.version }}
+      target_commitish: ${{ github.sha }}
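The `get_version` step shows the output-passing pattern used throughout the new workflows: `devscripts/update-version.py` prints the updated version, `grep -Po` keeps just a `version=...` pair, and that line is appended to the `$GITHUB_OUTPUT` file, replacing the deprecated `::set-output` workflow commands that the old build.yml relied on. In isolation:

```sh
# key=value lines appended to $GITHUB_OUTPUT become steps.<id>.outputs.<key>
# (the version value here is illustrative)
echo "version=2023.10.07.123456" >> "$GITHUB_OUTPUT"
# deprecated equivalent removed by these changes:
# echo "::set-output name=version::2023.10.07.123456"
```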
163  .github/workflows/release.yml  (vendored, new file)
@@ -0,0 +1,163 @@
+name: Release
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: Version tag (YYYY.MM.DD[.REV])
+        required: false
+        default: ''
+        type: string
+      channel:
+        description: Update channel (stable/nightly/...)
+        required: false
+        default: ''
+        type: string
+      prerelease:
+        description: Pre-release
+        default: false
+        type: boolean
+
+permissions:
+  contents: read
+
+jobs:
+  prepare:
+    permissions:
+      contents: write
+    runs-on: ubuntu-latest
+    outputs:
+      channel: ${{ steps.set_channel.outputs.channel }}
+      version: ${{ steps.update_version.outputs.version }}
+      head_sha: ${{ steps.get_target.outputs.head_sha }}
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Set channel
+        id: set_channel
+        run: |
+          CHANNEL="${{ github.repository == 'yt-dlp/yt-dlp' && 'stable' || github.repository }}"
+          echo "channel=${{ inputs.channel || '$CHANNEL' }}" > "$GITHUB_OUTPUT"
+
+      - name: Update version
+        id: update_version
+        run: |
+          REVISION="${{ vars.PUSH_VERSION_COMMIT == '' && '$(date -u +"%H%M%S")' || '' }}"
+          REVISION="${{ inputs.prerelease && '$(date -u +"%H%M%S")' || '$REVISION' }}"
+          python devscripts/update-version.py ${{ inputs.version || '$REVISION' }} | \
+            grep -Po "version=\d+\.\d+\.\d+(\.\d+)?" >> "$GITHUB_OUTPUT"
+
+      - name: Update documentation
+        run: |
+          make doc
+          sed '/### /Q' Changelog.md >> ./CHANGELOG
+          echo '### ${{ steps.update_version.outputs.version }}' >> ./CHANGELOG
+          python ./devscripts/make_changelog.py -vv -c >> ./CHANGELOG
+          echo >> ./CHANGELOG
+          grep -Poz '(?s)### \d+\.\d+\.\d+.+' 'Changelog.md' | head -n -1 >> ./CHANGELOG
+          cat ./CHANGELOG > Changelog.md
+
+      - name: Push to release
+        id: push_release
+        if: ${{ !inputs.prerelease }}
+        run: |
+          git config --global user.name github-actions
+          git config --global user.email github-actions@example.com
+          git add -u
+          git commit -m "Release ${{ steps.update_version.outputs.version }}" \
+            -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
+          git push origin --force ${{ github.event.ref }}:release
+
+      - name: Get target commitish
+        id: get_target
+        run: |
+          echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
+
+      - name: Update master
+        if: vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease
+        run: git push origin ${{ github.event.ref }}
+
+  build:
+    needs: prepare
+    uses: ./.github/workflows/build.yml
+    with:
+      version: ${{ needs.prepare.outputs.version }}
+      channel: ${{ needs.prepare.outputs.channel }}
+    permissions:
+      contents: read
+      packages: write # For package cache
+    secrets:
+      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
+
+  publish_pypi_homebrew:
+    needs: [prepare, build]
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Install Requirements
+        run: |
+          sudo apt-get -y install pandoc man
+          python -m pip install -U pip setuptools wheel twine
+          python -m pip install -U -r requirements.txt
+
+      - name: Prepare
+        run: |
+          python devscripts/update-version.py ${{ needs.prepare.outputs.version }}
+          python devscripts/make_lazy_extractors.py
+
+      - name: Build and publish on PyPI
+        env:
+          TWINE_USERNAME: __token__
+          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+        if: env.TWINE_PASSWORD != '' && !inputs.prerelease
+        run: |
+          rm -rf dist/*
+          make pypi-files
+          python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
+          python setup.py sdist bdist_wheel
+          twine upload dist/*
+
+      - name: Checkout Homebrew repository
+        env:
+          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
+          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+        if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease
+        uses: actions/checkout@v3
+        with:
+          repository: yt-dlp/homebrew-taps
+          path: taps
+          ssh-key: ${{ secrets.BREW_TOKEN }}
+
+      - name: Update Homebrew Formulae
+        env:
+          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
+          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+        if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease
+        run: |
+          python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.prepare.outputs.version }}"
+          git -C taps/ config user.name github-actions
+          git -C taps/ config user.email github-actions@example.com
+          git -C taps/ commit -am 'yt-dlp: ${{ needs.prepare.outputs.version }}'
+          git -C taps/ push
+
+  publish:
+    needs: [prepare, build]
+    uses: ./.github/workflows/publish.yml
+    permissions:
+      contents: write
+    with:
+      channel: ${{ needs.prepare.outputs.channel }}
+      prerelease: ${{ inputs.prerelease }}
+      version: ${{ needs.prepare.outputs.version }}
+      target_commitish: ${{ needs.prepare.outputs.head_sha }}
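GitHub Actions expressions have no if/else, so the `Update version` step above emulates one twice with chained `&&`/`||`. The expressions are expanded before the shell runs, so each resolves to a literal shell snippet. For instance, with `PUSH_VERSION_COMMIT` unset and `prerelease` false, the runner's shell sees roughly:

```sh
# One possible expansion of the Update version step (my reading of the expressions)
REVISION="$(date -u +"%H%M%S")"  # no version commit will be pushed, so auto-revision
REVISION="$REVISION"             # prerelease=false keeps the previous value
python devscripts/update-version.py $REVISION | \
  grep -Po "version=\d+\.\d+\.\d+(\.\d+)?" >> "$GITHUB_OUTPUT"
```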
15  .gitignore  (vendored)
@@ -27,8 +27,10 @@ cookies
 *.ass
 *.avi
 *.desktop
+*.f4v
 *.flac
 *.flv
+*.gif
 *.jpeg
 *.jpg
 *.m4a
@@ -38,6 +40,8 @@ cookies
 *.mov
 *.mp3
 *.mp4
+*.mpga
+*.oga
 *.ogg
 *.opus
 *.png
@@ -45,6 +49,7 @@ cookies
 *.srt
 *.swf
 *.swp
+*.tt
 *.ttml
 *.url
 *.vtt
@@ -67,6 +72,7 @@ dist/
 zip/
 tmp/
 venv/
+.venv/
 completions/

 # Misc
@@ -83,6 +89,7 @@ updates_key.pem
 .tox
 *.class
 *.isorted
+*.stackdump

 # Generated
 AUTHORS
@@ -114,9 +121,5 @@ yt-dlp.zip
 */extractor/lazy_extractors.py

 # Plugins
-ytdlp_plugins/extractor/*
-!ytdlp_plugins/extractor/__init__.py
-!ytdlp_plugins/extractor/sample.py
-ytdlp_plugins/postprocessor/*
-!ytdlp_plugins/postprocessor/__init__.py
-!ytdlp_plugins/postprocessor/sample.py
+ytdlp_plugins/
+yt-dlp-plugins
@@ -79,7 +79,7 @@ Before reporting any issue, type `yt-dlp -U`. This should report that you're up-
|
|||||||
|
|
||||||
### Is the issue already documented?
|
### Is the issue already documented?
|
||||||
|
|
||||||
Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, feel free to write something along the lines of "This affects me as well, with version 2021.01.01. Here is some more information on the issue: ...". While some issues may be old, a new post into them often spurs rapid activity.
|
Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, subcribe to it to be notified when there is any progress. Unless you have something useful to add to the converation, please refrain from commenting.
|
||||||
|
|
||||||
Additionally, it is also helpful to see if the issue has already been documented in the [youtube-dl issue tracker](https://github.com/ytdl-org/youtube-dl/issues). If similar issues have already been reported in youtube-dl (but not in our issue tracker), links to them can be included in your issue report here.
|
Additionally, it is also helpful to see if the issue has already been documented in the [youtube-dl issue tracker](https://github.com/ytdl-org/youtube-dl/issues). If similar issues have already been reported in youtube-dl (but not in our issue tracker), links to them can be included in your issue report here.
|
||||||
|
|
||||||
@@ -127,7 +127,7 @@ While these steps won't necessarily ensure that no misuse of the account takes p
|
|||||||
|
|
||||||
### Is the website primarily used for piracy?
|
### Is the website primarily used for piracy?
|
||||||
|
|
||||||
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in deep fake. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -161,7 +161,7 @@ The same applies for changes to the documentation, code style, or overarching ch
|
|||||||
|
|
||||||
## Adding support for a new site
|
## Adding support for a new site
|
||||||
|
|
||||||
If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](https://www.github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.
|
If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](#is-the-website-primarily-used-for-piracy)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.
|
||||||
|
|
||||||
After you have ensured this site is distributing its content legally, you can follow this quick list (assuming your service is called `yourextractor`):
|
After you have ensured this site is distributing its content legally, you can follow this quick list (assuming your service is called `yourextractor`):
|
||||||
|
|
||||||
@@ -195,7 +195,7 @@ After you have ensured this site is distributing its content legally, you can fo
|
|||||||
# * A value
|
# * A value
|
||||||
# * MD5 checksum; start the string with md5:
|
# * MD5 checksum; start the string with md5:
|
||||||
# * A regular expression; start the string with re:
|
# * A regular expression; start the string with re:
|
||||||
# * Any Python type (for example int or float)
|
# * Any Python type, e.g. int or float
|
||||||
}
|
}
|
||||||
}]
|
}]
|
||||||
|
|
||||||
@@ -217,12 +217,12 @@ After you have ensured this site is distributing its content legally, you can fo
|
|||||||
1. Add an import in [`yt_dlp/extractor/_extractors.py`](yt_dlp/extractor/_extractors.py). Note that the class name must end with `IE`.
|
1. Add an import in [`yt_dlp/extractor/_extractors.py`](yt_dlp/extractor/_extractors.py). Note that the class name must end with `IE`.
|
||||||
1. Run `python test/test_download.py TestDownload.test_YourExtractor` (note that `YourExtractor` doesn't end with `IE`). This *should fail* at first, but you can continually re-run it until you're done. If you decide to add more than one test, the tests will then be named `TestDownload.test_YourExtractor`, `TestDownload.test_YourExtractor_1`, `TestDownload.test_YourExtractor_2`, etc. Note that tests with `only_matching` key in test's dict are not counted in. You can also run all the tests in one go with `TestDownload.test_YourExtractor_all`
|
1. Run `python test/test_download.py TestDownload.test_YourExtractor` (note that `YourExtractor` doesn't end with `IE`). This *should fail* at first, but you can continually re-run it until you're done. If you decide to add more than one test, the tests will then be named `TestDownload.test_YourExtractor`, `TestDownload.test_YourExtractor_1`, `TestDownload.test_YourExtractor_2`, etc. Note that tests with `only_matching` key in test's dict are not counted in. You can also run all the tests in one go with `TestDownload.test_YourExtractor_all`
|
||||||
1. Make sure you have atleast one test for your extractor. Even if all videos covered by the extractor are expected to be inaccessible for automated testing, tests should still be added with a `skip` parameter indicating why the particular test is disabled from running.
|
1. Make sure you have atleast one test for your extractor. Even if all videos covered by the extractor are expected to be inaccessible for automated testing, tests should still be added with a `skip` parameter indicating why the particular test is disabled from running.
|
||||||
1. Have a look at [`yt_dlp/extractor/common.py`](yt_dlp/extractor/common.py) for possible helper methods and a [detailed description of what your extractor should and may return](yt_dlp/extractor/common.py#L91-L426). Add tests and code for as many as you want.
|
1. Have a look at [`yt_dlp/extractor/common.py`](yt_dlp/extractor/common.py) for possible helper methods and a [detailed description of what your extractor should and may return](yt_dlp/extractor/common.py#L119-L440). Add tests and code for as many as you want.
|
||||||
1. Make sure your code follows [yt-dlp coding conventions](#yt-dlp-coding-conventions) and check the code with [flake8](https://flake8.pycqa.org/en/latest/index.html#quickstart):
|
1. Make sure your code follows [yt-dlp coding conventions](#yt-dlp-coding-conventions) and check the code with [flake8](https://flake8.pycqa.org/en/latest/index.html#quickstart):
|
||||||
|
|
||||||
$ flake8 yt_dlp/extractor/yourextractor.py
|
$ flake8 yt_dlp/extractor/yourextractor.py
|
||||||
|
|
||||||
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.6 and above. Backward compatibility is not required for even older versions of Python.
|
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.7 and above. Backward compatibility is not required for even older versions of Python.
|
||||||
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
||||||
|
|
||||||
$ git add yt_dlp/extractor/_extractors.py
|
$ git add yt_dlp/extractor/_extractors.py
|
||||||
@@ -246,12 +246,12 @@ In any case, thank you very much for your contributions!
|
|||||||
|
|
||||||
This section introduces a guide lines for writing idiomatic, robust and future-proof extractor code.
|
This section introduces a guide lines for writing idiomatic, robust and future-proof extractor code.
|
||||||
|
|
||||||
Extractors are very fragile by nature since they depend on the layout of the source data provided by 3rd party media hosters out of your control and this layout tends to change. As an extractor implementer your task is not only to write code that will extract media links and metadata correctly but also to minimize dependency on the source's layout and even to make the code foresee potential future changes and be ready for that. This is important because it will allow the extractor not to break on minor layout changes thus keeping old yt-dlp versions working. Even though this breakage issue may be easily fixed by a new version of yt-dlp, this could take some time, during which the the extractor will remain broken.
|
Extractors are very fragile by nature since they depend on the layout of the source data provided by 3rd party media hosters out of your control and this layout tends to change. As an extractor implementer your task is not only to write code that will extract media links and metadata correctly but also to minimize dependency on the source's layout and even to make the code foresee potential future changes and be ready for that. This is important because it will allow the extractor not to break on minor layout changes thus keeping old yt-dlp versions working. Even though this breakage issue may be easily fixed by a new version of yt-dlp, this could take some time, during which the extractor will remain broken.
|
||||||
|
|
||||||
|
|
||||||
### Mandatory and optional metafields
|
### Mandatory and optional metafields
|
||||||
|
|
||||||
For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L91-L426) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:
|
For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L119-L440) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:
|
||||||
|
|
||||||
- `id` (media identifier)
|
- `id` (media identifier)
|
||||||
- `title` (media title)
|
- `title` (media title)
|
||||||
@@ -261,7 +261,7 @@ The aforementioned metafields are the critical data that the extraction does not
|
|||||||
|
|
||||||
For pornographic sites, appropriate `age_limit` must also be returned.
|
For pornographic sites, appropriate `age_limit` must also be returned.
|
||||||
|
|
||||||
The extractor is allowed to return the info dict without url or formats in some special cases if it allows the user to extract usefull information with `--ignore-no-formats-error` - Eg: when the video is a live stream that has not started yet.
|
The extractor is allowed to return the info dict without url or formats in some special cases if it allows the user to extract usefull information with `--ignore-no-formats-error` - e.g. when the video is a live stream that has not started yet.
|
||||||
|
|
||||||
[Any field](yt_dlp/extractor/common.py#219-L426) apart from the aforementioned ones are considered **optional**. That means that extraction should be **tolerant** to situations when sources for these fields can potentially be unavailable (even if they are always available at the moment) and **future-proof** in order not to break the extraction of general purpose mandatory fields.
|
[Any field](yt_dlp/extractor/common.py#219-L426) apart from the aforementioned ones are considered **optional**. That means that extraction should be **tolerant** to situations when sources for these fields can potentially be unavailable (even if they are always available at the moment) and **future-proof** in order not to break the extraction of general purpose mandatory fields.
|
||||||
|
|
||||||
@@ -351,8 +351,9 @@ Say you extracted a list of thumbnails into `thumbnail_data` and want to iterate
|
|||||||
```python
|
```python
|
||||||
thumbnail_data = data.get('thumbnails') or []
|
thumbnail_data = data.get('thumbnails') or []
|
||||||
thumbnails = [{
|
thumbnails = [{
|
||||||
'url': item['url']
|
'url': item['url'],
|
||||||
} for item in thumbnail_data] # correct
|
'height': item.get('h'),
|
||||||
|
} for item in thumbnail_data if item.get('url')] # correct
|
||||||
```
|
```
|
||||||
|
|
||||||
and not like:
|
and not like:
|
||||||
@@ -360,12 +361,27 @@ and not like:
|
|||||||
```python
|
```python
|
||||||
thumbnail_data = data.get('thumbnails')
|
thumbnail_data = data.get('thumbnails')
|
||||||
thumbnails = [{
|
thumbnails = [{
|
||||||
'url': item['url']
|
'url': item['url'],
|
||||||
|
'height': item.get('h'),
|
||||||
} for item in thumbnail_data] # incorrect
|
} for item in thumbnail_data] # incorrect
|
||||||
```
|
```
|
||||||
|
|
||||||
 In this case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `or []` avoids this error and results in setting an empty list in `thumbnails` instead.

+Alternatively, this can be further simplified by using `traverse_obj`
+
+```python
+thumbnails = [{
+    'url': item['url'],
+    'height': item.get('h'),
+} for item in traverse_obj(data, ('thumbnails', lambda _, v: v['url']))]
+```
+
+or, even better,
+
+```python
+thumbnails = traverse_obj(data, ('thumbnails', ..., {'url': 'url', 'height': 'h'}))
+```
+
 ### Provide fallbacks

@@ -680,7 +696,7 @@ formats = [

 ### Use convenience conversion and parsing functions

-Wrap all extracted numeric data into safe functions from [`yt_dlp/utils.py`](yt_dlp/utils.py): `int_or_none`, `float_or_none`. Use them for string to number conversions as well.
+Wrap all extracted numeric data into safe functions from [`yt_dlp/utils/`](yt_dlp/utils/): `int_or_none`, `float_or_none`. Use them for string to number conversions as well.

 Use `url_or_none` for safe URL processing.

@@ -688,7 +704,7 @@ Use `traverse_obj` and `try_call` (supersedes `dict_get` and `try_get`) for safe

 Use `unified_strdate` for uniform `upload_date` or any `YYYYMMDD` meta field extraction, `unified_timestamp` for uniform `timestamp` extraction, `parse_filesize` for `filesize` extraction, `parse_count` for count meta fields extraction, `parse_resolution`, `parse_duration` for `duration` extraction, `parse_age_limit` for `age_limit` extraction.

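A short, self-contained sketch of how these helpers are typically combined (the `meta` dict and its key names are made up for illustration):

```python
from yt_dlp.utils import (
    int_or_none,
    parse_duration,
    unified_strdate,
    url_or_none,
)

# Hypothetical metadata parsed from a page
meta = {'views': '1234', 'length': '1:02:03', 'published': '2023-10-07', 'thumb': 'https://example.com/t.jpg'}

info = {
    'view_count': int_or_none(meta.get('views')),  # '1234' -> 1234; None stays None
    'duration': parse_duration(meta.get('length')),  # '1:02:03' -> 3723
    'upload_date': unified_strdate(meta.get('published')),  # '2023-10-07' -> '20231007'
    'thumbnail': url_or_none(meta.get('thumb')),  # drops invalid URLs instead of raising
}
```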
-Explore [`yt_dlp/utils.py`](yt_dlp/utils.py) for more useful convenience functions.
+Explore [`yt_dlp/utils/`](yt_dlp/utils/) for more useful convenience functions.

 #### Examples

CONTRIBUTORS (239 changed lines)

@@ -2,7 +2,8 @@ pukkandan (owner)
 shirt-dev (collaborator)
 coletdjnz/colethedj (collaborator)
 Ashish0804 (collaborator)
-nao20010128nao/Lesmiscore (collaborator)
+bashonly (collaborator)
+Grub4K (collaborator)
 h-h-h-h
 pauldubois98
 nixxo
@@ -272,3 +273,239 @@ crazymoose77756
 nomevi
 Brett824
 pingiun
+dosy4ev
+EhtishamSabir
+Ferdi265
+FirefoxMetzger
+ftk
+lamby
+llamasblade
+lockmatrix
+misaelaguayo
+odo2063
+pritam20ps05
+scy
+sheerluck
+AxiosDeminence
+DjesonPV
+eren-kemer
+freezboltz
+Galiley
+haobinliang
+Mehavoid
+winterbird-code
+yashkc2025
+aldoridhoni
+jacobtruman
+masta79
+palewire
+cgrigis
+DavidH-2022
+dfaker
+jackyyf
+ohaiibuzzle
+SamantazFox
+shreyasminocha
+tejasa97
+xenov
+satan1st
+0xGodspeed
+5736d79
+587021c
+basrieter
+Bobscorn
+CNugteren
+columndeeply
+DoubleCouponDay
+Fabi019
+GautamMKGarg
+itachi-19
+jeroenj
+josanabr
+LiviaMedeiros
+nikita-moor
+snapdgn
+SuperSonicHub1
+tannertechnology
+Timendum
+tobi1805
+TokyoBlackHole
+ajayyy
+Alienmaster
+bsun0000
+changren-wcr
+ClosedPort22
+CrankDatSouljaBoy
+cruel-efficiency
+endotronic
+Generator
+gibson042
+How-Bout-No
+invertico
+jahway603
+jwoglom
+lksj
+megapro17
+mlampe
+MrOctopus
+nosoop
+puc9
+sashashura
+schnusch
+SG5
+the-marenga
+tkgmomosheep
+vitkhab
+glensc
+synthpop123
+tntmod54321
+milkknife
+Bnyro
+CapacitorSet
+stelcodes
+skbeh
+muddi900
+digitall
+chengzhicn
+mexus
+JChris246
+redraskal
+Spicadox
+barsnick
+docbender
+KurtBestor
+Chrissi2812
+FrederikNS
+gschizas
+JC-Chung
+mzhou
+OndrejBakan
+ab4cbef
+aionescu
+amra
+ByteDream
+carusocr
+chexxor
+felixonmars
+FrankZ85
+FriedrichRehren
+gregsadetsky
+LeoniePhiline
+LowSuggestion912
+Matumo
+OIRNOIR
+OMEGARAZER
+oxamun
+pmitchell86
+qbnu
+qulaz
+rebane2001
+road-master
+rohieb
+sdht0
+seproDev
+Hill-98
+LXYan2333
+mushbite
+venkata-krishnas
+7vlad7
+alexklapheke
+arobase-che
+bepvte
+bergoid
+blmarket
+brandon-dacrib
+c-basalt
+CoryTibbettsDev
+Cyberes
+D0LLYNH0
+danog
+DataGhost
+falbrechtskirchinger
+foreignBlade
+garret1317
+hasezoey
+hoaluvn
+ItzMaxTV
+ivanskodje
+jo-nike
+kangalio
+linsui
+makew0rld
+menschel
+mikf
+mrscrapy
+NDagestad
+Neurognostic
+NextFire
+nick-cd
+permunkle
+pzhlkj6612
+ringus1
+rjy
+Schmoaaaaah
+sjthespian
+theperfectpunk
+toomyzoom
+truedread
+TxI5
+unbeatable-101
+vampirefrog
+vidiot720
+viktor-enzell
+zhgwn
+barthelmannk
+berkanteber
+OverlordQ
+rexlambert22
+Ti4eeT4e
+AmanSal1
+bbilly1
+meliber
+nnoboa
+rdamas
+RfadnjdExt
+urectanc
+nao20010128nao/Lesmiscore
+04-pasha-04
+aaruni96
+aky-01
+AmirAflak
+ApoorvShah111
+at-wat
+davinkevin
+demon071
+denhotte
+FinnRG
+fireattack
+Frankgoji
+GD-Slime
+hatsomatt
+ifan-t
+kshitiz305
+kylegustavo
+mabdelfattah
+nathantouze
+niemands
+Rajeshwaran2001
+RedDeffender
+Rohxn16
+sb0stn
+SevenLives
+simon300000
+snixon
+soundchaser128
+szabyg
+trainman261
+trislee
+wader
+Yalab7
+zhallgato
+zhong-yiyu
+Zprokkel
+AS6939
+drzraf
+handlerug
+jiru
+madewokherd
+xofe
Changelog.md (1417 changed lines)
(File diff suppressed because it is too large)

Collaborators.md

@@ -8,6 +8,7 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho

 ## [pukkandan](https://github.com/pukkandan)

 [ko-fi](https://ko-fi.com/pukkandan)
+[gh-sponsor](https://github.com/sponsors/pukkandan)

 * Owner of the fork

@@ -25,15 +26,16 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho

 ## [coletdjnz](https://github.com/coletdjnz)

 [gh-sponsor](https://github.com/sponsors/coletdjnz)

+* Improved plugin architecture
 * YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
-* Added support for downloading YoutubeWebArchive videos
-* Added support for new websites MainStreaming, PRX, nzherald, etc
+* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
+* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc


-## [Ashish0804](https://github.com/Ashish0804)
+## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>

 [ko-fi](https://ko-fi.com/ashish0804)

@@ -42,10 +44,18 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
 * Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc


-## [Lesmiscore](https://github.com/Lesmiscore) (nao20010128nao)
+## [bashonly](https://github.com/bashonly)

-**Bitcoin**: bc1qfd02r007cutfdjwjmyy9w23rjvtls6ncve7r3s
-**Monacoin**: mona1q3tf7dzvshrhfe3md379xtvt2n22duhglv5dskr
-
-* Download live from start to end for YouTube
-* Added support for new websites mildom, PixivSketch, skeb, radiko, voicy, mirrativ, openrec, whowatch, damtomo, 17.live, mixch etc
+* `--update-to`, automated release, nightly builds
+* `--cookies-from-browser` support for Firefox containers
+* Added support for new websites Genius, Kick, NBCStations, Triller, VideoKen etc
+* Improved/fixed support for Anvato, Brightcove, Instagram, ParamountPlus, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc
+
+
+## [Grub4K](https://github.com/Grub4K)
+
+[ko-fi](https://ko-fi.com/Grub4K) [gh-sponsor](https://github.com/sponsors/Grub4K)
+
+* `--update-to`, automated release, nightly builds
+* Rework internals like `traverse_obj`, various core refactors and bug fixes
+* Helped fix crunchyroll, Twitter, wrestleuniverse, wistia, slideslive etc

Makefile (14 changed lines)

@@ -17,8 +17,8 @@ pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
 clean-test:
 	rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
 	*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
-	*.3gp *.ape *.ass *.avi *.desktop *.flac *.flv *.jpeg *.jpg *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 \
-	*.mp4 *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
+	*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 \
+	*.mp4 *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
 clean-dist:
 	rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
 	yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap

@@ -33,7 +33,6 @@ completion-zsh: completions/zsh/_yt-dlp
 lazy-extractors: yt_dlp/extractor/lazy_extractors.py

 PREFIX ?= /usr/local
-DESTDIR ?= .
 BINDIR ?= $(PREFIX)/bin
 MANDIR ?= $(PREFIX)/man
 SHAREDIR ?= $(PREFIX)/share

@@ -75,17 +74,16 @@ offlinetest: codetest
 	$(PYTHON) -m pytest -k "not download"

 # XXX: This is hard to maintain
-CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat \
-	yt_dlp/extractor/anvato_token_generator
+CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat yt_dlp/compat/urllib yt_dlp/utils yt_dlp/dependencies yt_dlp/networking
 yt-dlp: yt_dlp/*.py yt_dlp/*/*.py
 	mkdir -p zip
 	for d in $(CODE_FOLDERS) ; do \
 	  mkdir -p zip/$$d ;\
 	  cp -pPR $$d/*.py zip/$$d/ ;\
 	done
-	touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py zip/yt_dlp/*/*/*.py
+	touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py
 	mv zip/yt_dlp/__main__.py zip/
-	cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py yt_dlp/*/*/*.py __main__.py
+	cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py __main__.py
 	rm -rf zip
 	echo '#!$(PYTHON)' > yt-dlp
 	cat yt-dlp.zip >> yt-dlp

@@ -134,7 +132,7 @@ yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscrip
 	$(PYTHON) devscripts/make_lazy_extractors.py $@

 yt-dlp.tar.gz: all
-	@tar -czf $(DESTDIR)/yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
+	@tar -czf yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
 		--exclude '*.DS_Store' \
 		--exclude '*.kate-swp' \
 		--exclude '*.pyc' \
devscripts/__init__.py (new file, 1 line)

# Empty file needed to make devscripts.utils properly importable from outside
devscripts/changelog_override.json (new file, 102 lines)

[
    {
        "action": "add",
        "when": "29cb20bd563c02671b31dd840139e93dd37150a1",
        "short": "[priority] **A new release type has been added!**\n * [`nightly`](https://github.com/yt-dlp/yt-dlp/releases/tag/nightly) builds will be made after each push, containing the latest fixes (but also possibly bugs).\n * When using `--update`/`-U`, a release binary will only update to its current channel (either `stable` or `nightly`).\n * The `--update-to` option has been added allowing the user more control over program upgrades (or downgrades).\n * `--update-to` can change the release channel (`stable`, `nightly`) and also upgrade or downgrade to specific tags.\n * **Usage**: `--update-to CHANNEL`, `--update-to TAG`, `--update-to CHANNEL@TAG`"
    },
    {
        "action": "add",
        "when": "5038f6d713303e0967d002216e7a88652401c22a",
        "short": "[priority] **YouTube throttling fixes!**"
    },
    {
        "action": "remove",
        "when": "2e023649ea4e11151545a34dc1360c114981a236"
    },
    {
        "action": "add",
        "when": "01aba2519a0884ef17d5f85608dbd2a455577147",
        "short": "[priority] YouTube: Improved throttling and signature fixes"
    },
    {
        "action": "change",
        "when": "c86e433c35fe5da6cb29f3539eef97497f84ed38",
        "short": "[extractor/niconico:series] Fix extraction (#6898)",
        "authors": ["sqrtNOT"]
    },
    {
        "action": "change",
        "when": "69a40e4a7f6caa5662527ebd2f3c4e8aa02857a2",
        "short": "[extractor/youtube:music_search_url] Extract title (#7102)",
        "authors": ["kangalio"]
    },
    {
        "action": "change",
        "when": "8417f26b8a819cd7ffcd4e000ca3e45033e670fb",
        "short": "Add option `--color` (#6904)",
        "authors": ["Grub4K"]
    },
    {
        "action": "change",
        "when": "b4e0d75848e9447cee2cd3646ce54d4744a7ff56",
        "short": "Improve `--download-sections`\n - Support negative time-ranges\n - Add `*from-url` to obey time-ranges in URL",
        "authors": ["pukkandan"]
    },
    {
        "action": "change",
        "when": "1e75d97db21152acc764b30a688e516f04b8a142",
        "short": "[extractor/youtube] Add `ios` to default clients used\n - IOS is affected neither by 403 nor by nsig so helps mitigate them preemptively\n - IOS also has higher bit-rate 'premium' formats though they are not labeled as such",
        "authors": ["pukkandan"]
    },
    {
        "action": "change",
        "when": "f2ff0f6f1914b82d4a51681a72cc0828115dcb4a",
        "short": "[extractor/motherless] Add gallery support, fix groups (#7211)",
        "authors": ["rexlambert22", "Ti4eeT4e"]
    },
    {
        "action": "change",
        "when": "a4486bfc1dc7057efca9dd3fe70d7fa25c56f700",
        "short": "[misc] Revert \"Add automatic duplicate issue detection\"",
        "authors": ["pukkandan"]
    },
    {
        "action": "add",
        "when": "1ceb657bdd254ad961489e5060f2ccc7d556b729",
        "short": "[priority] Security: [[CVE-2023-35934](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-35934)] Fix [Cookie leak](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-v8mc-9377-rwjj)\n - `--add-header Cookie:` is deprecated and auto-scoped to input URL domains\n - Cookies are scoped when passed to external downloaders\n - Add `cookies` field to info.json and deprecate `http_headers.Cookie`"
    },
    {
        "action": "change",
        "when": "b03fa7834579a01cc5fba48c0e73488a16683d48",
        "short": "[ie/twitter] Revert 92315c03774cfabb3a921884326beb4b981f786b",
        "authors": ["pukkandan"]
    },
    {
        "action": "change",
        "when": "fcd6a76adc49d5cd8783985c7ce35384b72e545f",
        "short": "[test] Add tests for socks proxies (#7908)",
        "authors": ["coletdjnz"]
    },
    {
        "action": "change",
        "when": "4bf912282a34b58b6b35d8f7e6be535770c89c76",
        "short": "[rh:urllib] Remove dot segments during URL normalization (#7662)",
        "authors": ["coletdjnz"]
    },
    {
        "action": "change",
        "when": "59e92b1f1833440bb2190f847eb735cf0f90bc85",
        "short": "[rh:urllib] Simplify gzip decoding (#7611)",
        "authors": ["Grub4K"]
    },
    {
        "action": "add",
        "when": "c1d71d0d9f41db5e4306c86af232f5f6220a130b",
        "short": "[priority] **The minimum *recommended* Python version has been raised to 3.8**\nSince Python 3.7 has reached end-of-life, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/7803)"
    },
    {
        "action": "add",
        "when": "61bdf15fc7400601c3da1aa7a43917310a5bf391",
        "short": "[priority] Security: [[CVE-2023-40581](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40581)] [Prevent RCE when using `--exec` with `%q` on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-42h4-v29r-42qg)\n - The shell escape function is now using `\"\"` instead of `\\\"`.\n - `utils.Popen` has been patched to properly quote commands."
    }
]
devscripts/changelog_override.schema.json (new file, 96 lines)

{
    "$schema": "http://json-schema.org/draft/2020-12/schema",
    "type": "array",
    "uniqueItems": true,
    "items": {
        "type": "object",
        "oneOf": [
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "add"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    },
                    "short": {
                        "type": "string"
                    },
                    "authors": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    }
                },
                "required": [
                    "action",
                    "short"
                ]
            },
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "remove"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    }
                },
                "required": [
                    "action",
                    "hash"
                ]
            },
            {
                "type": "object",
                "properties": {
                    "action": {
                        "enum": [
                            "change"
                        ]
                    },
                    "when": {
                        "type": "string",
                        "pattern": "^([0-9a-f]{40}|\\d{4}\\.\\d{2}\\.\\d{2})$"
                    },
                    "hash": {
                        "type": "string",
                        "pattern": "^[0-9a-f]{40}$"
                    },
                    "short": {
                        "type": "string"
                    },
                    "authors": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    }
                },
                "required": [
                    "action",
                    "hash",
                    "short",
                    "authors"
                ]
            }
        ]
    }
}
devscripts/cli_to_api.py (new file, 48 lines)

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import yt_dlp
import yt_dlp.options

create_parser = yt_dlp.options.create_parser


def parse_patched_options(opts):
    patched_parser = create_parser()
    patched_parser.defaults.update({
        'ignoreerrors': False,
        'retries': 0,
        'fragment_retries': 0,
        'extract_flat': False,
        'concat_playlist': 'never',
    })
    yt_dlp.options.create_parser = lambda: patched_parser
    try:
        return yt_dlp.parse_options(opts)
    finally:
        yt_dlp.options.create_parser = create_parser


default_opts = parse_patched_options([]).ydl_opts


def cli_to_api(opts, cli_defaults=False):
    opts = (yt_dlp.parse_options if cli_defaults else parse_patched_options)(opts).ydl_opts

    diff = {k: v for k, v in opts.items() if default_opts[k] != v}
    if 'postprocessors' in diff:
        diff['postprocessors'] = [pp for pp in diff['postprocessors']
                                  if pp not in default_opts['postprocessors']]
    return diff


if __name__ == '__main__':
    from pprint import pprint

    print('\nThe arguments passed translate to:\n')
    pprint(cli_to_api(sys.argv[1:]))
    print('\nCombining these with the CLI defaults gives:\n')
    pprint(cli_to_api(sys.argv[1:], True))
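A rough usage sketch of the helper above (assuming it is imported from the repository root, which works because `devscripts/__init__.py` was added in this same change; the flags are arbitrary examples):

```python
from devscripts.cli_to_api import cli_to_api

# Which YoutubeDL params do these example CLI flags map to?
print(cli_to_api(['-f', 'best', '--no-mtime']))
```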
devscripts/lazy_load_template.py

@@ -6,17 +6,23 @@ from ..utils import (
     age_restricted,
     bug_reports_message,
     classproperty,
+    variadic,
     write_string,
 )

+# These bloat the lazy_extractors, so allow them to passthrough silently
+ALLOWED_CLASSMETHODS = {'extract_from_webpage', 'get_testcases', 'get_webpage_testcases'}
+_WARNED = False
+

 class LazyLoadMetaClass(type):
     def __getattr__(cls, name):
-        # "_TESTS" bloat the lazy_extractors
-        if '_real_class' not in cls.__dict__ and name != 'get_testcases':
-            write_string(
-                'WARNING: Falling back to normal extractor since lazy extractor '
-                f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
+        global _WARNED
+        if ('_real_class' not in cls.__dict__
+                and name not in ALLOWED_CLASSMETHODS and not _WARNED):
+            _WARNED = True
+            write_string('WARNING: Falling back to normal extractor since lazy extractor '
+                         f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
         return getattr(cls.real_class, name)

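The metaclass in that diff implements a lazy-fallback lookup; a generic, self-contained sketch of the same pattern (not yt-dlp code, all names made up):

```python
# Attribute lookups missing on the lazy stand-in are forwarded to the real
# object, which is only materialized on first use
class LazyLoadMeta(type):
    def __getattr__(cls, name):
        print(f'WARNING: falling back to the real class for {name!r}')
        return getattr(cls._materialize(), name)


class LazyExample(metaclass=LazyLoadMeta):
    @classmethod
    def _materialize(cls):
        import json  # stand-in for the expensive real import
        return json


print(LazyExample.dumps({'a': 1}))  # triggers the fallback, then prints '{"a": 1}'
```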
devscripts/make_changelog.py (new file, 516 lines)

from __future__ import annotations

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import enum
import itertools
import json
import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from functools import lru_cache
from pathlib import Path

from devscripts.utils import read_file, run_process, write_file

BASE_URL = 'https://github.com'
LOCATION_PATH = Path(__file__).parent
HASH_LENGTH = 7

logger = logging.getLogger(__name__)


class CommitGroup(enum.Enum):
    PRIORITY = 'Important'
    CORE = 'Core'
    EXTRACTOR = 'Extractor'
    DOWNLOADER = 'Downloader'
    POSTPROCESSOR = 'Postprocessor'
    NETWORKING = 'Networking'
    MISC = 'Misc.'

    @classmethod
    @lru_cache
    def subgroup_lookup(cls):
        return {
            name: group
            for group, names in {
                cls.CORE: {
                    'aes',
                    'cache',
                    'compat_utils',
                    'compat',
                    'cookies',
                    'dependencies',
                    'formats',
                    'jsinterp',
                    'outtmpl',
                    'plugins',
                    'update',
                    'utils',
                },
                cls.MISC: {
                    'build',
                    'cleanup',
                    'devscripts',
                    'docs',
                    'test',
                },
                cls.NETWORKING: {
                    'rh',
                },
            }.items()
            for name in names
        }

    @classmethod
    @lru_cache
    def group_lookup(cls):
        result = {
            'fd': cls.DOWNLOADER,
            'ie': cls.EXTRACTOR,
            'pp': cls.POSTPROCESSOR,
            'upstream': cls.CORE,
        }
        result.update({item.name.lower(): item for item in iter(cls)})
        return result

    @classmethod
    def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
        group, _, subgroup = (group.strip().lower() for group in value.partition('/'))

        result = cls.group_lookup().get(group)
        if not result:
            if subgroup:
                return None, value
            subgroup = group
            result = cls.subgroup_lookup().get(subgroup)

        return result, subgroup or None


@dataclass
class Commit:
    hash: str | None
    short: str
    authors: list[str]

    def __str__(self):
        result = f'{self.short!r}'

        if self.hash:
            result += f' ({self.hash[:HASH_LENGTH]})'

        if self.authors:
            authors = ', '.join(self.authors)
            result += f' by {authors}'

        return result


@dataclass
class CommitInfo:
    details: str | None
    sub_details: tuple[str, ...]
    message: str
    issues: list[str]
    commit: Commit
    fixes: list[Commit]

    def key(self):
        return ((self.details or '').lower(), self.sub_details, self.message)


def unique(items):
    return sorted({item.strip().lower(): item for item in items if item}.values())


class Changelog:
    MISC_RE = re.compile(r'(?:^|\b)(?:lint(?:ing)?|misc|format(?:ting)?|fixes)(?:\b|$)', re.IGNORECASE)
    ALWAYS_SHOWN = (CommitGroup.PRIORITY,)

    def __init__(self, groups, repo, collapsible=False):
        self._groups = groups
        self._repo = repo
        self._collapsible = collapsible

    def __str__(self):
        return '\n'.join(self._format_groups(self._groups)).replace('\t', '    ')

    def _format_groups(self, groups):
        first = True
        for item in CommitGroup:
            if self._collapsible and item not in self.ALWAYS_SHOWN and first:
                first = False
                yield '\n<details><summary><h3>Changelog</h3></summary>\n'

            group = groups[item]
            if group:
                yield self.format_module(item.value, group)

        if self._collapsible:
            yield '\n</details>'

    def format_module(self, name, group):
        result = f'\n#### {name} changes\n' if name else '\n'
        return result + '\n'.join(self._format_group(group))

    def _format_group(self, group):
        sorted_group = sorted(group, key=CommitInfo.key)
        detail_groups = itertools.groupby(sorted_group, lambda item: (item.details or '').lower())
        for _, items in detail_groups:
            items = list(items)
            details = items[0].details

            if details == 'cleanup':
                items = self._prepare_cleanup_misc_items(items)

            prefix = '-'
            if details:
                if len(items) == 1:
                    prefix = f'- **{details}**:'
                else:
                    yield f'- **{details}**'
                    prefix = '\t-'

            sub_detail_groups = itertools.groupby(items, lambda item: tuple(map(str.lower, item.sub_details)))
            for sub_details, entries in sub_detail_groups:
                if not sub_details:
                    for entry in entries:
                        yield f'{prefix} {self.format_single_change(entry)}'
                    continue

                entries = list(entries)
                sub_prefix = f'{prefix} {", ".join(entries[0].sub_details)}'
                if len(entries) == 1:
                    yield f'{sub_prefix}: {self.format_single_change(entries[0])}'
                    continue

                yield sub_prefix
                for entry in entries:
                    yield f'\t{prefix} {self.format_single_change(entry)}'

    def _prepare_cleanup_misc_items(self, items):
        cleanup_misc_items = defaultdict(list)
        sorted_items = []
        for item in items:
            if self.MISC_RE.search(item.message):
                cleanup_misc_items[tuple(item.commit.authors)].append(item)
            else:
                sorted_items.append(item)

        for commit_infos in cleanup_misc_items.values():
            sorted_items.append(CommitInfo(
                'cleanup', ('Miscellaneous',), ', '.join(
                    self._format_message_link(None, info.commit.hash)
                    for info in sorted(commit_infos, key=lambda item: item.commit.hash or '')),
                [], Commit(None, '', commit_infos[0].commit.authors), []))

        return sorted_items

    def format_single_change(self, info: CommitInfo):
        message, sep, rest = info.message.partition('\n')
        if '[' not in message:
            # If the message doesn't already contain markdown links, try to add a link to the commit
            message = self._format_message_link(message, info.commit.hash)

        if info.issues:
            message = f'{message} ({self._format_issues(info.issues)})'

        if info.commit.authors:
            message = f'{message} by {self._format_authors(info.commit.authors)}'

        if info.fixes:
            fix_message = ', '.join(f'{self._format_message_link(None, fix.hash)}' for fix in info.fixes)

            authors = sorted({author for fix in info.fixes for author in fix.authors}, key=str.casefold)
            if authors != info.commit.authors:
                fix_message = f'{fix_message} by {self._format_authors(authors)}'

            message = f'{message} (With fixes in {fix_message})'

        return message if not sep else f'{message}{sep}{rest}'

    def _format_message_link(self, message, hash):
        assert message or hash, 'Improperly defined commit message or override'
        message = message if message else hash[:HASH_LENGTH]
        return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message

    def _format_issues(self, issues):
        return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)

    @staticmethod
    def _format_authors(authors):
        return ', '.join(f'[{author}]({BASE_URL}/{author})' for author in authors)

    @property
    def repo_url(self):
        return f'{BASE_URL}/{self._repo}'


class CommitRange:
    COMMAND = 'git'
    COMMIT_SEPARATOR = '-----'

    AUTHOR_INDICATOR_RE = re.compile(r'Authored by:? ', re.IGNORECASE)
    MESSAGE_RE = re.compile(r'''
        (?:\[(?P<prefix>[^\]]+)\]\ )?
        (?:(?P<sub_details>`?[\w.-]+`?): )?
        (?P<message>.+?)
        (?:\ \((?P<issues>\#\d+(?:,\ \#\d+)*)\))?
        ''', re.VERBOSE | re.DOTALL)
    EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
    REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
    FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert)\s+([\da-f]{40})')
    UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')

    def __init__(self, start, end, default_author=None):
        self._start, self._end = start, end
        self._commits, self._fixes = self._get_commits_and_fixes(default_author)
        self._commits_added = []

    def __iter__(self):
        return iter(itertools.chain(self._commits.values(), self._commits_added))

    def __len__(self):
        return len(self._commits) + len(self._commits_added)

    def __contains__(self, commit):
        if isinstance(commit, Commit):
            if not commit.hash:
                return False
            commit = commit.hash

        return commit in self._commits

    def _get_commits_and_fixes(self, default_author):
        result = run_process(
            self.COMMAND, 'log', f'--format=%H%n%s%n%b%n{self.COMMIT_SEPARATOR}',
            f'{self._start}..{self._end}' if self._start else self._end).stdout

        commits, reverts = {}, {}
        fixes = defaultdict(list)
        lines = iter(result.splitlines(False))
        for i, commit_hash in enumerate(lines):
            short = next(lines)
            skip = short.startswith('Release ') or short == '[version] update'

            authors = [default_author] if default_author else []
            for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
                match = self.AUTHOR_INDICATOR_RE.match(line)
                if match:
                    authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)

            commit = Commit(commit_hash, short, authors)
            if skip and (self._start or not i):
                logger.debug(f'Skipped commit: {commit}')
                continue
            elif skip:
                logger.debug(f'Reached Release commit, breaking: {commit}')
                break

            revert_match = self.REVERT_RE.fullmatch(commit.short)
            if revert_match:
                reverts[revert_match.group(1)] = commit
                continue

            fix_match = self.FIXES_RE.search(commit.short)
            if fix_match:
                commitish = fix_match.group(1)
                fixes[commitish].append(commit)

            commits[commit.hash] = commit

        for commitish, revert_commit in reverts.items():
            reverted = commits.pop(commitish, None)
            if reverted:
                logger.debug(f'{commitish} fully reverted {reverted}')
            else:
                commits[revert_commit.hash] = revert_commit

        for commitish, fix_commits in fixes.items():
            if commitish in commits:
                hashes = ', '.join(commit.hash[:HASH_LENGTH] for commit in fix_commits)
                logger.info(f'Found fix(es) for {commitish[:HASH_LENGTH]}: {hashes}')
                for fix_commit in fix_commits:
                    del commits[fix_commit.hash]
            else:
                logger.debug(f'Commit with fixes not in changes: {commitish[:HASH_LENGTH]}')

        return commits, fixes

    def apply_overrides(self, overrides):
        for override in overrides:
            when = override.get('when')
            if when and when not in self and when != self._start:
                logger.debug(f'Ignored {when!r} override')
                continue

            override_hash = override.get('hash') or when
            if override['action'] == 'add':
                commit = Commit(override.get('hash'), override['short'], override.get('authors') or [])
                logger.info(f'ADD {commit}')
                self._commits_added.append(commit)

            elif override['action'] == 'remove':
                if override_hash in self._commits:
                    logger.info(f'REMOVE {self._commits[override_hash]}')
                    del self._commits[override_hash]

            elif override['action'] == 'change':
                if override_hash not in self._commits:
                    continue
                commit = Commit(override_hash, override['short'], override.get('authors') or [])
                logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
                self._commits[commit.hash] = commit

        self._commits = {key: value for key, value in reversed(self._commits.items())}

    def groups(self):
        group_dict = defaultdict(list)
        for commit in self:
            upstream_re = self.UPSTREAM_MERGE_RE.search(commit.short)
            if upstream_re:
                commit.short = f'[upstream] Merged with youtube-dl {upstream_re.group(1)}'

            match = self.MESSAGE_RE.fullmatch(commit.short)
            if not match:
                logger.error(f'Error parsing short commit message: {commit.short!r}')
                continue

            prefix, sub_details_alt, message, issues = match.groups()
            issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []

            if prefix:
                groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
                group = next(iter(filter(None, groups)), None)
                details = ', '.join(unique(details))
                sub_details = list(itertools.chain.from_iterable(sub_details))
            else:
                group = CommitGroup.CORE
                details = None
                sub_details = []

            if sub_details_alt:
                sub_details.append(sub_details_alt)
            sub_details = tuple(unique(sub_details))

            if not group:
                if self.EXTRACTOR_INDICATOR_RE.search(commit.short):
                    group = CommitGroup.EXTRACTOR
                else:
                    group = CommitGroup.POSTPROCESSOR
                logger.warning(f'Failed to map {commit.short!r}, selected {group.name.lower()}')

            commit_info = CommitInfo(
                details, sub_details, message.strip(),
                issues, commit, self._fixes[commit.hash])

            logger.debug(f'Resolved {commit.short!r} to {commit_info!r}')
            group_dict[group].append(commit_info)

        return group_dict

    @staticmethod
    def details_from_prefix(prefix):
        if not prefix:
            return CommitGroup.CORE, None, ()

        prefix, *sub_details = prefix.split(':')

        group, details = CommitGroup.get(prefix)
        if group is CommitGroup.PRIORITY and details:
            details = details.partition('/')[2].strip()

        if details and '/' in details:
            logger.error(f'Prefix is overnested, using first part: {prefix}')
            details = details.partition('/')[0].strip()

        if details == 'common':
            details = None
        elif group is CommitGroup.NETWORKING and details == 'rh':
            details = 'Request Handler'

        return group, details, sub_details


def get_new_contributors(contributors_path, commits):
    contributors = set()
    if contributors_path.exists():
        for line in read_file(contributors_path).splitlines():
            author, _, _ = line.strip().partition(' (')
            authors = author.split('/')
            contributors.update(map(str.casefold, authors))

    new_contributors = set()
    for commit in commits:
        for author in commit.authors:
            author_folded = author.casefold()
            if author_folded not in contributors:
                contributors.add(author_folded)
                new_contributors.add(author)

    return sorted(new_contributors, key=str.casefold)


if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Create a changelog markdown from a git commit range')
    parser.add_argument(
        'commitish', default='HEAD', nargs='?',
        help='The commitish to create the range from (default: %(default)s)')
    parser.add_argument(
        '-v', '--verbosity', action='count', default=0,
        help='increase verbosity (can be used twice)')
    parser.add_argument(
        '-c', '--contributors', action='store_true',
        help='update CONTRIBUTORS file (default: %(default)s)')
    parser.add_argument(
        '--contributors-path', type=Path, default=LOCATION_PATH.parent / 'CONTRIBUTORS',
        help='path to the CONTRIBUTORS file')
    parser.add_argument(
        '--no-override', action='store_true',
        help='skip override json in commit generation (default: %(default)s)')
    parser.add_argument(
        '--override-path', type=Path, default=LOCATION_PATH / 'changelog_override.json',
        help='path to the changelog_override.json file')
    parser.add_argument(
        '--default-author', default='pukkandan',
        help='the author to use without a author indicator (default: %(default)s)')
    parser.add_argument(
        '--repo', default='yt-dlp/yt-dlp',
        help='the github repository to use for the operations (default: %(default)s)')
    parser.add_argument(
        '--collapsible', action='store_true',
        help='make changelog collapsible (default: %(default)s)')
    args = parser.parse_args()

    logging.basicConfig(
        datefmt='%Y-%m-%d %H-%M-%S', format='{asctime} | {levelname:<8} | {message}',
        level=logging.WARNING - 10 * args.verbosity, style='{', stream=sys.stderr)

    commits = CommitRange(None, args.commitish, args.default_author)

    if not args.no_override:
        if args.override_path.exists():
            overrides = json.loads(read_file(args.override_path))
            commits.apply_overrides(overrides)
        else:
            logger.warning(f'File {args.override_path.as_posix()} does not exist')

    logger.info(f'Loaded {len(commits)} commits')

    new_contributors = get_new_contributors(args.contributors_path, commits)
    if new_contributors:
        if args.contributors:
            write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
        logger.info(f'New contributors: {", ".join(new_contributors)}')

    print(Changelog(commits.groups(), args.repo, args.collapsible))
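A possible invocation of this script from the repository root, based on the argparse definitions above (the flag combination is just one example):

```python
import subprocess
import sys

# Render the changelog markdown for all commits reachable from HEAD,
# with extra verbosity and a collapsible <details> wrapper
subprocess.run([sys.executable, 'devscripts/make_changelog.py', '-vv', '--collapsible'])
```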
devscripts/make_issue_template.py

@@ -7,30 +7,71 @@ import sys
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

-import optparse
+import re
+
+from devscripts.utils import (
+    get_filename_args,
+    read_file,
+    read_version,
+    write_file,
+)

-def read(fname):
-    with open(fname, encoding='utf-8') as f:
-        return f.read()
+VERBOSE_TMPL = '''
+  - type: checkboxes
+    id: verbose
+    attributes:
+      label: Provide verbose output that clearly demonstrates the problem
+      options:
+        - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+          required: true
+        - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
+          required: false
+        - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+          required: true
+  - type: textarea
+    id: log
+    attributes:
+      label: Complete Verbose Output
+      description: |
+        It should start like this:
+      placeholder: |
+        [debug] Command-line config: ['-vU', 'test:youtube']
+        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+        [debug] yt-dlp version %(version)s [9d339c4] (win32_exe)
+        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+        [debug] Checking exe version: ffmpeg -bsfs
+        [debug] Checking exe version: ffprobe -bsfs
+        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] Proxy map: {}
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+        Latest version: %(version)s, Current version: %(version)s
+        yt-dlp is up to date (%(version)s)
+        <more lines>
+      render: shell
+    validations:
+      required: true
+'''.strip()

-# Get the version without importing the package
-def read_version(fname):
-    exec(compile(read(fname), fname, 'exec'))
-    return locals()['__version__']
+NO_SKIP = '''
+  - type: checkboxes
+    attributes:
+      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+      description: Fill all fields even if you think it is irrelevant for the issue
+      options:
+        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\\* field
+          required: true
+'''.strip()


 def main():
-    parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
-    options, args = parser.parse_args()
-    if len(args) != 2:
-        parser.error('Expected an input and an output filename')
+    fields = {'version': read_version(), 'no_skip': NO_SKIP}
+    fields['verbose'] = VERBOSE_TMPL % fields
+    fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])

-    infile, outfile = args
-    with open(outfile, 'w', encoding='utf-8') as outf:
-        outf.write(
-            read(infile) % {'version': read_version('yt_dlp/version.py')})
+    infile, outfile = get_filename_args(has_infile=True)
+    write_file(outfile, read_file(infile) % fields)


 if __name__ == '__main__':
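The substitution step in that script is plain printf-style templating; a tiny standalone sketch with made-up fields:

```python
fields = {'version': '2023.10.07', 'no_skip': '  - type: checkboxes'}
template = 'yt-dlp version %(version)s\n%(no_skip)s'
print(template % fields)  # each %(name)s is filled in from the dict
```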
devscripts/make_lazy_extractors.py

@@ -2,41 +2,50 @@

 # Allow direct execution
 import os
+import shutil
 import sys

 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

-import optparse
 from inspect import getsource

+from devscripts.utils import get_filename_args, read_file, write_file
+
 NO_ATTR = object()
-STATIC_CLASS_PROPERTIES = ['IE_NAME', 'IE_DESC', 'SEARCH_KEY', '_WORKING', '_NETRC_MACHINE', 'age_limit']
+STATIC_CLASS_PROPERTIES = [
+    'IE_NAME', '_ENABLED', '_VALID_URL',  # Used for URL matching
+    '_WORKING', 'IE_DESC', '_NETRC_MACHINE', 'SEARCH_KEY',  # Used for --extractor-descriptions
+    'age_limit',  # Used for --age-limit (evaluated)
+    '_RETURN_TYPE',  # Accessed in CLI only with instance (evaluated)
+]
 CLASS_METHODS = [
-    'ie_key', 'working', 'description', 'suitable', '_match_valid_url', '_match_id', 'get_temp_id', 'is_suitable'
+    'ie_key', 'suitable', '_match_valid_url',  # Used for URL matching
+    'working', 'get_temp_id', '_match_id',  # Accessed just before instance creation
+    'description',  # Used for --extractor-descriptions
+    'is_suitable',  # Used for --age-limit
+    'supports_login', 'is_single_video',  # Accessed in CLI only with instance
 ]
 IE_TEMPLATE = '''
 class {name}({bases}):
     _module = {module!r}
 '''
-with open('devscripts/lazy_load_template.py', encoding='utf-8') as f:
-    MODULE_TEMPLATE = f.read()
+MODULE_TEMPLATE = read_file('devscripts/lazy_load_template.py')


 def main():
-    parser = optparse.OptionParser(usage='%prog [OUTFILE.py]')
-    args = parser.parse_args()[1] or ['yt_dlp/extractor/lazy_extractors.py']
-    if len(args) != 1:
-        parser.error('Expected only an output filename')
-
-    lazy_extractors_filename = args[0]
+    lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
     if os.path.exists(lazy_extractors_filename):
         os.remove(lazy_extractors_filename)

     _ALL_CLASSES = get_all_ies()  # Must be before import

+    import yt_dlp.plugins
     from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor

+    # Filter out plugins
+    _ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
+
     DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
     module_src = '\n'.join((
         MODULE_TEMPLATE,
@@ -46,20 +55,20 @@ def main():
         *build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
     ))

-    with open(lazy_extractors_filename, 'wt', encoding='utf-8') as f:
-        f.write(f'{module_src}\n')
+    write_file(lazy_extractors_filename, f'{module_src}\n')


 def get_all_ies():
     PLUGINS_DIRNAME = 'ytdlp_plugins'
     BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
     if os.path.exists(PLUGINS_DIRNAME):
-        os.rename(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
+        # os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
+        shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
     try:
         from yt_dlp.extractor.extractors import _ALL_CLASSES
     finally:
         if os.path.exists(BLOCKED_DIRNAME):
-            os.rename(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
+            shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
     return _ALL_CLASSES


@@ -94,7 +103,7 @@ def sort_ies(ies, ignored_bases):
     for c in classes[:]:
         bases = set(c.__bases__) - {object, *ignored_bases}
         restart = False
-        for b in bases:
+        for b in sorted(bases, key=lambda x: x.__name__):
             if b not in classes and b not in returned_classes:
                 assert b.__name__ != 'GenericIE', 'Cannot inherit from GenericIE'
                 classes.insert(0, b)
@@ -116,11 +125,6 @@ def build_lazy_ie(ie, name, attr_base):
     }.get(base.__name__, base.__name__) for base in ie.__bases__)

     s = IE_TEMPLATE.format(name=name, module=ie.__module__, bases=bases)
-    valid_url = getattr(ie, '_VALID_URL', None)
-    if not valid_url and hasattr(ie, '_make_valid_url'):
-        valid_url = ie._make_valid_url()
-    if valid_url:
-        s += f'    _VALID_URL = {valid_url!r}\n'
     return s + '\n'.join(extra_ie_code(ie, attr_base))

|
|||||||
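Note: the switch from os.rename to shutil.move above is load-bearing. os.rename fails with OSError (errno EXDEV) when source and destination live on different filesystems, which is common with Docker bind mounts, whereas shutil.move falls back to copy-and-delete. A minimal sketch of the difference, with hypothetical paths:

import os
import shutil

SRC, DST = '/mnt/volume-a/ytdlp_plugins', '/mnt/volume-b/ytdlp_plugins_blocked'  # hypothetical paths

try:
    os.rename(SRC, DST)  # raises OSError (EXDEV) when SRC and DST are on different devices
except OSError:
    shutil.move(SRC, DST)  # falls back to copying the tree and removing the source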
@@ -5,10 +5,17 @@ yt-dlp --help | make_readme.py
 This must be run in a console of correct width
 """
 
+# Allow direct execution
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
 import functools
 import re
-import sys
+
+from devscripts.utils import read_file, write_file
 
 README_FILE = 'README.md'
 
@@ -38,37 +45,49 @@ switch_col_width = len(re.search(r'(?m)^\s{5,}', options).group())
 delim = f'\n{" " * switch_col_width}'
 
 PATCHES = (
+    (  # Standardize `--update` message
+        r'(?m)^(    -U, --update\s+).+(\n \s.+)*$',
+        r'\1Update this program to the latest version',
+    ),
     (  # Headings
         r'(?m)^ (\w.+\n)(    (?=\w))?',
         r'## \1'
     ),
+    (  # Fixup `--date` formatting
+        rf'(?m)(    --date DATE.+({delim}[^\[]+)*)\[.+({delim}.+)*$',
+        (rf'\1[now|today|yesterday][-N[day|week|month|year]].{delim}'
+         f'E.g. "--date today-2weeks" downloads only{delim}'
+         'videos uploaded on the same day two weeks ago'),
+    ),
     (  # Do not split URLs
         rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
         lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n'))
     ),
     (  # Do not split "words"
         rf'(?m)({delim}\S+)+$',
         lambda mobj: ''.join((delim, mobj.group(0).replace(delim, '')))
     ),
     (  # Allow overshooting last line
         rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
         lambda mobj: (mobj.group().replace(delim, ' ')
                       if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
                       else mobj.group())
     ),
     (  # Avoid newline when a space is available b/w switch and description
         DISABLE_PATCH,  # This creates issues with prepare_manpage
         r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
         r'\1 '
     ),
+    (  # Replace brackets with a Markdown link
+        r'SponsorBlock API \((http.+)\)',
+        r'[SponsorBlock API](\1)'
+    ),
 )
 
-with open(README_FILE, encoding='utf-8') as f:
-    readme = f.read()
+readme = read_file(README_FILE)
 
-with open(README_FILE, 'w', encoding='utf-8') as f:
-    f.write(''.join((
-        take_section(readme, end=f'## {OPTIONS_START}'),
-        functools.reduce(apply_patch, PATCHES, options),
-        take_section(readme, f'# {OPTIONS_END}'),
-    )))
+write_file(README_FILE, ''.join((
+    take_section(readme, end=f'## {OPTIONS_START}'),
+    functools.reduce(apply_patch, PATCHES, options),
+    take_section(readme, f'# {OPTIONS_END}'),
+)))
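Note: each PATCHES entry is a (pattern, replacement) pair folded over the options text with functools.reduce. A self-contained sketch of that fold, using a simplified apply_patch (the real one in this script also honors the DISABLE_PATCH sentinel) and hypothetical patches:

import functools
import re

def apply_patch(text, patch):
    # re.sub also accepts a function as the replacement, which is why
    # several of the patches above are lambdas rather than strings
    return re.sub(*patch, text)

PATCHES = (
    (r'(?m)^# ', '## '),   # hypothetical heading patch
    (r'\s+$', ''),         # hypothetical trailing-whitespace patch
)
options = '# Usage\nyt-dlp [OPTIONS] URL   \n'
print(functools.reduce(apply_patch, PATCHES, options))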
@@ -7,21 +7,13 @@ import sys
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
-import optparse
+from devscripts.utils import get_filename_args, write_file
 
 from yt_dlp.extractor import list_extractor_classes
 
 
 def main():
-    parser = optparse.OptionParser(usage='%prog OUTFILE.md')
-    _, args = parser.parse_args()
-    if len(args) != 1:
-        parser.error('Expected an output filename')
-
     out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
-
-    with open(args[0], 'w', encoding='utf-8') as outf:
-        outf.write(f'# Supported sites\n{out}\n')
+    write_file(get_filename_args(), f'# Supported sites\n{out}\n')
 
 
 if __name__ == '__main__':
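Note: get_filename_args (from the new devscripts.utils, added later in this diff) replaces the per-script optparse boilerplate with a single positional argparse argument, so the invocation is unchanged. A sketch of what it returns, simulating the command line (illustrative only):

import sys
from devscripts.utils import get_filename_args

sys.argv = ['make_supportedsites.py', 'supportedsites.md']  # simulate: python devscripts/make_supportedsites.py supportedsites.md
assert get_filename_args() == 'supportedsites.md'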
@@ -1,9 +1,22 @@
 #!/usr/bin/env python3
 
-import optparse
+# Allow direct execution
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
 import os.path
 import re
 
+from devscripts.utils import (
+    compose_functions,
+    get_filename_args,
+    read_file,
+    write_file,
+)
+
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 README_FILE = os.path.join(ROOT_DIR, 'README.md')
 
@@ -22,25 +35,6 @@ yt\-dlp \- A youtube-dl fork with additional features and patches
 '''
 
 
-def main():
-    parser = optparse.OptionParser(usage='%prog OUTFILE.md')
-    _, args = parser.parse_args()
-    if len(args) != 1:
-        parser.error('Expected an output filename')
-
-    outfile, = args
-
-    with open(README_FILE, encoding='utf-8') as f:
-        readme = f.read()
-
-    readme = filter_excluded_sections(readme)
-    readme = move_sections(readme)
-    readme = filter_options(readme)
-
-    with open(outfile, 'w', encoding='utf-8') as outf:
-        outf.write(PREFIX + readme)
-
-
 def filter_excluded_sections(readme):
     EXCLUDED_SECTION_BEGIN_STRING = re.escape('<!-- MANPAGE: BEGIN EXCLUDED SECTION -->')
     EXCLUDED_SECTION_END_STRING = re.escape('<!-- MANPAGE: END EXCLUDED SECTION -->')
@@ -92,5 +86,12 @@ def filter_options(readme):
     return readme.replace(section, options, 1)
 
 
+TRANSFORM = compose_functions(filter_excluded_sections, move_sections, filter_options)
+
+
+def main():
+    write_file(get_filename_args(), PREFIX + TRANSFORM(read_file(README_FILE)))
+
+
 if __name__ == '__main__':
     main()
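Note: compose_functions applies its arguments left to right, so TRANSFORM(readme) is filter_options(move_sections(filter_excluded_sections(readme))). A quick self-contained check of the ordering (the stand-in lambdas are illustrative only):

import functools

def compose_functions(*functions):
    return lambda x: functools.reduce(lambda y, f: f(y), functions, x)

transform = compose_functions(lambda s: s + 'a', lambda s: s + 'b', lambda s: s + 'c')
assert transform('') == 'abc'  # leftmost function runs first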
@@ -1,13 +1,13 @@
 #!/usr/bin/env sh
 
-if [ -z $1 ]; then
+if [ -z "$1" ]; then
     test_set='test'
-elif [ $1 = 'core' ]; then
+elif [ "$1" = 'core' ]; then
     test_set="-m not download"
-elif [ $1 = 'download' ]; then
+elif [ "$1" = 'download' ]; then
     test_set="-m download"
 else
-    echo 'Invalid test type "'$1'". Use "core" | "download"'
+    echo 'Invalid test type "'"$1"'". Use "core" | "download"'
     exit 1
 fi
 36  devscripts/set-variant.py  (new file)
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+
+import argparse
+import functools
+import re
+
+from devscripts.utils import compose_functions, read_file, write_file
+
+VERSION_FILE = 'yt_dlp/version.py'
+
+
+def parse_options():
+    parser = argparse.ArgumentParser(description='Set the build variant of the package')
+    parser.add_argument('variant', help='Name of the variant')
+    parser.add_argument('-M', '--update-message', default=None, help='Message to show in -U')
+    return parser.parse_args()
+
+
+def property_setter(name, value):
+    return functools.partial(re.sub, rf'(?m)^{name}\s*=\s*.+$', f'{name} = {value!r}')
+
+
+opts = parse_options()
+transform = compose_functions(
+    property_setter('VARIANT', opts.variant),
+    property_setter('UPDATE_HINT', opts.update_message)
+)
+
+write_file(VERSION_FILE, transform(read_file(VERSION_FILE)))
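Note: property_setter builds a partial of re.sub that rewrites one top-level assignment in version.py; the value is embedded with !r, so strings come out quoted and None stays bare. A small demonstration on a stand-in version.py snippet (the variant name and message are hypothetical):

import functools
import re

def property_setter(name, value):
    return functools.partial(re.sub, rf'(?m)^{name}\s*=\s*.+$', f'{name} = {value!r}')

src = 'VARIANT = None\nUPDATE_HINT = None\n'
src = property_setter('VARIANT', 'pip')(src)
src = property_setter('UPDATE_HINT', 'Run pip install -U yt-dlp')(src)
print(src)
# VARIANT = 'pip'
# UPDATE_HINT = 'Run pip install -U yt-dlp'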
@@ -1,5 +1,10 @@
 #!/usr/bin/env python3
 
+"""
+Usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
+version can be either 0-aligned (yt-dlp version) or normalized (PyPi version)
+"""
+
 # Allow direct execution
 import os
 import sys
@@ -11,8 +16,7 @@ import json
 import re
 import urllib.request
 
-# usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
-# version can be either 0-aligned (yt-dlp version) or normalized (PyPl version)
+from devscripts.utils import read_file, write_file
 
 filename, version = sys.argv[1:]
 
@@ -27,11 +31,9 @@ tarball_file = next(x for x in pypi_release['urls'] if x['filename'].endswith('.
 sha256sum = tarball_file['digests']['sha256']
 url = tarball_file['url']
 
-with open(filename) as r:
-    formulae_text = r.read()
+formulae_text = read_file(filename)
 
-formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text)
-formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text)
+formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text, count=1)
+formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text, count=1)
 
-with open(filename, 'w') as w:
-    w.write(formulae_text)
+write_file(filename, formulae_text)
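Note: the added count=1 limits each re.sub to the first match, so a formula that contains further sha256/url stanzas (e.g. inside resource blocks) is not clobbered. A toy illustration with hypothetical URLs:

import re

formula = 'url "https://old/yt-dlp.tar.gz"\nresource "other" do\n  url "https://keep/this"\nend\n'
print(re.sub(r'url "[^"]*?"', 'url "https://new/yt-dlp.tar.gz"', formula, count=1))
# only the first url line is rewritten; the resource url is untouched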
@@ -7,43 +7,65 @@ import sys
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
-import subprocess
+import argparse
+import contextlib
 import sys
-from datetime import datetime
+from datetime import datetime, timezone
 
-with open('yt_dlp/version.py') as f:
-    exec(compile(f.read(), 'yt_dlp/version.py', 'exec'))
-old_version = locals()['__version__']
+from devscripts.utils import read_version, run_process, write_file
 
-old_version_list = old_version.split('.')
 
-old_ver = '.'.join(old_version_list[:3])
-old_rev = old_version_list[3] if len(old_version_list) > 3 else ''
+def get_new_version(version, revision):
+    if not version:
+        version = datetime.now(timezone.utc).strftime('%Y.%m.%d')
 
-ver = datetime.utcnow().strftime("%Y.%m.%d")
+    if revision:
+        assert revision.isdigit(), 'Revision must be a number'
+    else:
+        old_version = read_version().split('.')
+        if version.split('.') == old_version[:3]:
+            revision = str(int((old_version + [0])[3]) + 1)
 
-rev = (sys.argv[1:] or [''])[0]  # Use first argument, if present as revision number
-if not rev:
-    rev = str(int(old_rev or 0) + 1) if old_ver == ver else ''
+    return f'{version}.{revision}' if revision else version
 
-VERSION = '.'.join((ver, rev)) if rev else ver
 
-try:
-    sp = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE)
-    GIT_HEAD = sp.communicate()[0].decode().strip() or None
-except Exception:
-    GIT_HEAD = None
+def get_git_head():
+    with contextlib.suppress(Exception):
+        return run_process('git', 'rev-parse', 'HEAD').stdout.strip()
 
-VERSION_FILE = f'''\
+
+VERSION_TEMPLATE = '''\
 # Autogenerated by devscripts/update-version.py
 
-__version__ = {VERSION!r}
+__version__ = {version!r}
 
-RELEASE_GIT_HEAD = {GIT_HEAD!r}
+RELEASE_GIT_HEAD = {git_head!r}
+
+VARIANT = None
+
+UPDATE_HINT = None
+
+CHANNEL = {channel!r}
 '''
 
-with open('yt_dlp/version.py', 'wt') as f:
-    f.write(VERSION_FILE)
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Update the version.py file')
+    parser.add_argument(
+        '-c', '--channel', default='stable',
+        help='Select update channel (default: %(default)s)')
+    parser.add_argument(
+        '-o', '--output', default='yt_dlp/version.py',
+        help='The output file to write to (default: %(default)s)')
+    parser.add_argument(
+        'version', nargs='?', default=None,
+        help='A version or revision to use instead of generating one')
+    args = parser.parse_args()
 
-print('::set-output name=ytdlp_version::' + VERSION)
-print(f'\nVersion = {VERSION}, Git HEAD = {GIT_HEAD}')
+    git_head = get_git_head()
+    version = (
+        args.version if args.version and '.' in args.version
+        else get_new_version(None, args.version))
+    write_file(args.output, VERSION_TEMPLATE.format(
+        version=version, git_head=git_head, channel=args.channel))
+
+    print(f'version={version} ({args.channel}), head={git_head}')
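Note: get_new_version makes same-day rebuilds monotonic: when the computed date equals the first three components of the existing version, the fourth component is bumped; otherwise the bare date is used. A standalone copy of that logic for illustration (old_version is passed in here instead of calling read_version(), and the date is a stand-in):

def get_new_version(version, revision, old_version):  # standalone copy for illustration
    if not version:
        version = '2023.03.04'  # stand-in for datetime.now(timezone.utc).strftime('%Y.%m.%d')
    if revision:
        assert revision.isdigit(), 'Revision must be a number'
    else:
        old = old_version.split('.')
        if version.split('.') == old[:3]:
            revision = str(int((old + [0])[3]) + 1)
    return f'{version}.{revision}' if revision else version

assert get_new_version(None, None, '2023.03.04') == '2023.03.04.1'    # same-day rebuild
assert get_new_version(None, None, '2023.03.04.1') == '2023.03.04.2'  # keeps counting
assert get_new_version(None, None, '2023.03.03') == '2023.03.04'      # new day: no revision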
 46  devscripts/utils.py  (new file)
@@ -0,0 +1,46 @@
+import argparse
+import functools
+import subprocess
+
+
+def read_file(fname):
+    with open(fname, encoding='utf-8') as f:
+        return f.read()
+
+
+def write_file(fname, content, mode='w'):
+    with open(fname, mode, encoding='utf-8') as f:
+        return f.write(content)
+
+
+def read_version(fname='yt_dlp/version.py'):
+    """Get the version without importing the package"""
+    exec(compile(read_file(fname), fname, 'exec'))
+    return locals()['__version__']
+
+
+def get_filename_args(has_infile=False, default_outfile=None):
+    parser = argparse.ArgumentParser()
+    if has_infile:
+        parser.add_argument('infile', help='Input file')
+    kwargs = {'nargs': '?', 'default': default_outfile} if default_outfile else {}
+    parser.add_argument('outfile', **kwargs, help='Output file')
+
+    opts = parser.parse_args()
+    if has_infile:
+        return opts.infile, opts.outfile
+    return opts.outfile
+
+
+def compose_functions(*functions):
+    return lambda x: functools.reduce(lambda y, f: f(y), functions, x)
+
+
+def run_process(*args, **kwargs):
+    kwargs.setdefault('text', True)
+    kwargs.setdefault('check', True)
+    kwargs.setdefault('capture_output', True)
+    if kwargs['text']:
+        kwargs.setdefault('encoding', 'utf-8')
+        kwargs.setdefault('errors', 'replace')
+    return subprocess.run(args, **kwargs)
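Note: run_process is subprocess.run with text mode, check=True and capture_output=True as overridable defaults, so callers get decoded stdout directly and a subprocess.CalledProcessError on non-zero exit. A usage sketch:

from devscripts.utils import run_process

head = run_process('git', 'rev-parse', 'HEAD').stdout.strip()  # raises CalledProcessError if git fails

# Since setdefault is used, any default can be overridden per call:
run_process('git', 'status', capture_output=False)  # let output go straight to the terminal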
 29  public.key  (new file)
@@ -0,0 +1,29 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBGP78C4BEAD0rF9zjGPAt0thlt5C1ebzccAVX7Nb1v+eqQjk+WEZdTETVCg3
+WAM5ngArlHdm/fZqzUgO+pAYrB60GKeg7ffUDf+S0XFKEZdeRLYeAaqqKhSibVal
+DjvOBOztu3W607HLETQAqA7wTPuIt2WqmpL60NIcyr27LxqmgdN3mNvZ2iLO+bP0
+nKR/C+PgE9H4ytywDa12zMx6PmZCnVOOOu6XZEFmdUxxdQ9fFDqd9LcBKY2LDOcS
+Yo1saY0YWiZWHtzVoZu1kOzjnS5Fjq/yBHJLImDH7pNxHm7s/PnaurpmQFtDFruk
+t+2lhDnpKUmGr/I/3IHqH/X+9nPoS4uiqQ5HpblB8BK+4WfpaiEg75LnvuOPfZIP
+KYyXa/0A7QojMwgOrD88ozT+VCkKkkJ+ijXZ7gHNjmcBaUdKK7fDIEOYI63Lyc6Q
+WkGQTigFffSUXWHDCO9aXNhP3ejqFWgGMtCUsrbkcJkWuWY7q5ARy/05HbSM3K4D
+U9eqtnxmiV1WQ8nXuI9JgJQRvh5PTkny5LtxqzcmqvWO9TjHBbrs14BPEO9fcXxK
+L/CFBbzXDSvvAgArdqqlMoncQ/yicTlfL6qzJ8EKFiqW14QMTdAn6SuuZTodXCTi
+InwoT7WjjuFPKKdvfH1GP4bnqdzTnzLxCSDIEtfyfPsIX+9GI7Jkk/zZjQARAQAB
+tDdTaW1vbiBTYXdpY2tpICh5dC1kbHAgc2lnbmluZyBrZXkpIDxjb250YWN0QGdy
+dWI0ay54eXo+iQJOBBMBCgA4FiEErAy75oSNaoc0ZK9OV89lkztadYEFAmP78C4C
+GwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQV89lkztadYEVqQ//cW7TxhXg
+7Xbh2EZQzXml0egn6j8QaV9KzGragMiShrlvTO2zXfLXqyizrFP4AspgjSn/4NrI
+8mluom+Yi+qr7DXT4BjQqIM9y3AjwZPdywe912Lxcw52NNoPZCm24I9T7ySc8lmR
+FQvZC0w4H/VTNj/2lgJ1dwMflpwvNRiWa5YzcFGlCUeDIPskLx9++AJE+xwU3LYm
+jQQsPBqpHHiTBEJzMLl+rfd9Fg4N+QNzpFkTDW3EPerLuvJniSBBwZthqxeAtw4M
+UiAXh6JvCc2hJkKCoygRfM281MeolvmsGNyQm+axlB0vyldiPP6BnaRgZlx+l6MU
+cPqgHblb7RW5j9lfr6OYL7SceBIHNv0CFrt1OnkGo/tVMwcs8LH3Ae4a7UJlIceL
+V54aRxSsZU7w4iX+PB79BWkEsQzwKrUuJVOeL4UDwWajp75OFaUqbS/slDDVXvK5
+OIeuth3mA/adjdvgjPxhRQjA3l69rRWIJDrqBSHldmRsnX6cvXTDy8wSXZgy51lP
+m4IVLHnCy9m4SaGGoAsfTZS0cC9FgjUIyTyrq9M67wOMpUxnuB0aRZgJE1DsI23E
+qdvcSNVlO+39xM/KPWUEh6b83wMn88QeW+DCVGWACQq5N3YdPnAJa50617fGbY6I
+gXIoRHXkDqe23PZ/jURYCv0sjVtjPoVC+bg=
+=bJkn
+-----END PGP PUBLIC KEY BLOCK-----
 67  pyinst.py
@@ -1,24 +1,31 @@
 #!/usr/bin/env python3
+
+# Allow direct execution
 import os
-import platform
 import sys
+
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+
+import platform
+
 from PyInstaller.__main__ import run as run_pyinstaller
 
-OS_NAME, ARCH = sys.platform, platform.architecture()[0][:2]
+from devscripts.utils import read_version
+
+OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
+if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
+    MACHINE = 'x86' if ARCH == '32' else ''
 
 
 def main():
-    opts = parse_options()
-    version = read_version('yt_dlp/version.py')
+    opts, version = parse_options(), read_version()
 
     onedir = '--onedir' in opts or '-D' in opts
     if not onedir and '-F' not in opts and '--onefile' not in opts:
         opts.append('--onefile')
 
     name, final_file = exe(onedir)
-    print(f'Building yt-dlp v{version} {ARCH}bit for {OS_NAME} with options {opts}')
+    print(f'Building yt-dlp v{version} for {OS_NAME} {platform.machine()} with options {opts}')
     print('Remember to update the version using "devscripts/update-version.py"')
     if not os.path.isfile('yt_dlp/extractor/lazy_extractors.py'):
         print('WARNING: Building without lazy_extractors. Run '
@@ -30,10 +37,7 @@ def main():
         '--icon=devscripts/logo.ico',
         '--upx-exclude=vcruntime140.dll',
         '--noconfirm',
-        # NB: Modules that are only imported dynamically must be added here.
-        # --collect-submodules may not work correctly if user has a yt-dlp installed via PIP
-        '--hidden-import=yt_dlp.compat._legacy',
-        *dependency_options(),
+        '--additional-hooks-dir=yt_dlp/__pyinstaller',
         *opts,
         'yt_dlp/__main__.py',
     ]
@@ -53,19 +57,12 @@ def parse_options():
     return opts
 
 
-# Get the version from yt_dlp/version.py without importing the package
-def read_version(fname):
-    with open(fname, encoding='utf-8') as f:
-        exec(compile(f.read(), fname, 'exec'))
-        return locals()['__version__']
-
-
 def exe(onedir):
     """@returns (name, path)"""
     name = '_'.join(filter(None, (
         'yt-dlp',
         {'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
-        ARCH == '32' and 'x86'
+        MACHINE,
     )))
     return name, ''.join(filter(None, (
         'dist/',
@@ -80,30 +77,6 @@ def version_to_list(version):
     return list(map(int, version_list)) + [0] * (4 - len(version_list))
 
 
-def dependency_options():
-    # Due to the current implementation, these are auto-detected, but explicitly add them just in case
-    dependencies = [pycryptodome_module(), 'mutagen', 'brotli', 'certifi', 'websockets']
-    excluded_modules = ['test', 'ytdlp_plugins', 'youtube_dl', 'youtube_dlc']
-
-    yield from (f'--hidden-import={module}' for module in dependencies)
-    yield '--collect-submodules=websockets'
-    yield from (f'--exclude-module={module}' for module in excluded_modules)
-
-
-def pycryptodome_module():
-    try:
-        import Cryptodome  # noqa: F401
-    except ImportError:
-        try:
-            import Crypto  # noqa: F401
-            print('WARNING: Using Crypto since Cryptodome is not available. '
-                  'Install with: pip install pycryptodomex', file=sys.stderr)
-            return 'Crypto'
-        except ImportError:
-            pass
-    return 'Cryptodome'
-
-
 def set_version_info(exe, version):
     if OS_NAME == 'win32':
         windows_set_version(exe, version)
@@ -112,7 +85,6 @@ def set_version_info(exe, version):
 def windows_set_version(exe, version):
     from PyInstaller.utils.win32.versioninfo import (
         FixedFileInfo,
-        SetVersion,
         StringFileInfo,
         StringStruct,
         StringTable,
@@ -121,8 +93,13 @@ def windows_set_version(exe, version):
         VSVersionInfo,
     )
 
+    try:
+        from PyInstaller.utils.win32.versioninfo import SetVersion
+    except ImportError:  # Pyinstaller >= 5.8
+        from PyInstaller.utils.win32.versioninfo import write_version_info_to_executable as SetVersion
+
     version_list = version_to_list(version)
-    suffix = '_x86' if ARCH == '32' else ''
+    suffix = MACHINE and f'_{MACHINE}'
     SetVersion(exe, VSVersionInfo(
         ffi=FixedFileInfo(
             filevers=version_list,
@@ -136,9 +113,9 @@ def windows_set_version(exe, version):
         ),
         kids=[
             StringFileInfo([StringTable('040904B0', [
-                StringStruct('Comments', 'yt-dlp%s Command Line Interface.' % suffix),
+                StringStruct('Comments', 'yt-dlp%s Command Line Interface' % suffix),
                 StringStruct('CompanyName', 'https://github.com/yt-dlp'),
-                StringStruct('FileDescription', 'yt-dlp%s' % (' (32 Bit)' if ARCH == '32' else '')),
+                StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
                 StringStruct('FileVersion', version),
                 StringStruct('InternalName', f'yt-dlp{suffix}'),
                 StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
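Note: the MACHINE normalization collapses the x86 family to '' on 64-bit (keeping names short) and 'x86' on 32-bit, while other machines (e.g. aarch64) keep their name; exe() then joins the non-empty parts. A standalone sketch of the resulting base names (example values, not an exhaustive matrix):

def exe_name(os_name, machine, arch):
    # mirrors the MACHINE normalization and name-joining logic in the diff above
    machine = machine.lower()
    if machine in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
        machine = 'x86' if arch == '32' else ''
    return '_'.join(filter(None, (
        'yt-dlp', {'win32': '', 'darwin': 'macos'}.get(os_name, os_name), machine)))

assert exe_name('win32', 'AMD64', '64') == 'yt-dlp'            # dist/yt-dlp.exe
assert exe_name('win32', 'x86', '32') == 'yt-dlp_x86'
assert exe_name('darwin', 'x86_64', '64') == 'yt-dlp_macos'
assert exe_name('linux', 'aarch64', '64') == 'yt-dlp_linux_aarch64'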
 5  pyproject.toml  (new file)
@@ -0,0 +1,5 @@
+[build-system]
+build-backend = 'setuptools.build_meta'
+# https://github.com/yt-dlp/yt-dlp/issues/5941
+# https://github.com/pypa/distutils/issues/17
+requires = ['setuptools > 50']
 14  setup.cfg
@@ -10,6 +10,14 @@ per_file_ignores =
     devscripts/lazy_load_template.py: F401
 
 
+[autoflake]
+ignore-init-module-imports = true
+ignore-pass-after-docstring = true
+remove-all-unused-imports = true
+remove-duplicate-keys = true
+remove-unused-variables = true
+
+
 [tool:pytest]
 addopts = -ra -v --strict-markers
 markers =
@@ -18,12 +26,12 @@ markers =
 
 [tox:tox]
 skipsdist = true
-envlist = py{36,37,38,39,310},pypy{36,37,38,39}
+envlist = py{36,37,38,39,310,311},pypy{36,37,38,39}
 skip_missing_interpreters = true
 
 [testenv]  # tox
 deps =
     pytest
 commands = pytest {posargs:"-m not download"}
 passenv = HOME  # For test_compat_expanduser
 setenv =
@@ -31,7 +39,7 @@ setenv =
 
 
 [isort]
-py_version = 36
+py_version = 37
 multi_line_output = VERTICAL_HANGING_INDENT
 line_length = 80
 reverse_relative = true
|||||||
189
setup.py
189
setup.py
@@ -1,7 +1,12 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import os.path
|
# Allow execution from anywhere
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||||
|
|
||||||
|
import subprocess
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -10,63 +15,64 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
from distutils.core import Command, setup
|
from distutils.core import Command, setup
|
||||||
setuptools_available = False
|
setuptools_available = False
|
||||||
from distutils.spawn import spawn
|
|
||||||
|
|
||||||
|
from devscripts.utils import read_file, read_version
|
||||||
|
|
||||||
def read(fname):
|
VERSION = read_version()
|
||||||
with open(fname, encoding='utf-8') as f:
|
|
||||||
return f.read()
|
|
||||||
|
|
||||||
|
|
||||||
# Get the version from yt_dlp/version.py without importing the package
|
|
||||||
def read_version(fname):
|
|
||||||
exec(compile(read(fname), fname, 'exec'))
|
|
||||||
return locals()['__version__']
|
|
||||||
|
|
||||||
|
|
||||||
VERSION = read_version('yt_dlp/version.py')
|
|
||||||
|
|
||||||
DESCRIPTION = 'A youtube-dl fork with additional features and patches'
|
DESCRIPTION = 'A youtube-dl fork with additional features and patches'
|
||||||
|
|
||||||
LONG_DESCRIPTION = '\n\n'.join((
|
LONG_DESCRIPTION = '\n\n'.join((
|
||||||
'Official repository: <https://github.com/yt-dlp/yt-dlp>',
|
'Official repository: <https://github.com/yt-dlp/yt-dlp>',
|
||||||
'**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
|
'**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
|
||||||
read('README.md')))
|
read_file('README.md')))
|
||||||
|
|
||||||
REQUIREMENTS = read('requirements.txt').splitlines()
|
REQUIREMENTS = read_file('requirements.txt').splitlines()
|
||||||
|
|
||||||
|
|
||||||
if sys.argv[1:2] == ['py2exe']:
|
def packages():
|
||||||
import py2exe # noqa: F401
|
if setuptools_available:
|
||||||
|
return find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'))
|
||||||
|
|
||||||
|
return [
|
||||||
|
'yt_dlp', 'yt_dlp.extractor', 'yt_dlp.downloader', 'yt_dlp.postprocessor', 'yt_dlp.compat',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def py2exe_params():
|
||||||
warnings.warn(
|
warnings.warn(
|
||||||
'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
|
'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
|
||||||
'The recommended way is to use "pyinst.py" to build using pyinstaller')
|
'It is recommended to run "pyinst.py" to build using pyinstaller instead')
|
||||||
params = {
|
|
||||||
|
return {
|
||||||
'console': [{
|
'console': [{
|
||||||
'script': './yt_dlp/__main__.py',
|
'script': './yt_dlp/__main__.py',
|
||||||
'dest_base': 'yt-dlp',
|
'dest_base': 'yt-dlp',
|
||||||
|
'icon_resources': [(1, 'devscripts/logo.ico')],
|
||||||
|
}],
|
||||||
|
'version_info': {
|
||||||
'version': VERSION,
|
'version': VERSION,
|
||||||
'description': DESCRIPTION,
|
'description': DESCRIPTION,
|
||||||
'comments': LONG_DESCRIPTION.split('\n')[0],
|
'comments': LONG_DESCRIPTION.split('\n')[0],
|
||||||
'product_name': 'yt-dlp',
|
'product_name': 'yt-dlp',
|
||||||
'product_version': VERSION,
|
'product_version': VERSION,
|
||||||
}],
|
|
||||||
'options': {
|
|
||||||
'py2exe': {
|
|
||||||
'bundle_files': 0,
|
|
||||||
'compressed': 1,
|
|
||||||
'optimize': 2,
|
|
||||||
'dist_dir': './dist',
|
|
||||||
'excludes': ['Crypto', 'Cryptodome'], # py2exe cannot import Crypto
|
|
||||||
'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
|
|
||||||
# Modules that are only imported dynamically must be added here
|
|
||||||
'includes': ['yt_dlp.compat._legacy'],
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
'zipfile': None
|
'options': {
|
||||||
|
'bundle_files': 0,
|
||||||
|
'compressed': 1,
|
||||||
|
'optimize': 2,
|
||||||
|
'dist_dir': './dist',
|
||||||
|
'excludes': ['Crypto', 'Cryptodome'], # py2exe cannot import Crypto
|
||||||
|
'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
|
||||||
|
# Modules that are only imported dynamically must be added here
|
||||||
|
'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
|
||||||
|
'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
|
||||||
|
},
|
||||||
|
'zipfile': None,
|
||||||
}
|
}
|
||||||
|
|
||||||
else:
|
|
||||||
|
def build_params():
|
||||||
files_spec = [
|
files_spec = [
|
||||||
('share/bash-completion/completions', ['completions/bash/yt-dlp']),
|
('share/bash-completion/completions', ['completions/bash/yt-dlp']),
|
||||||
('share/zsh/site-functions', ['completions/zsh/_yt-dlp']),
|
('share/zsh/site-functions', ['completions/zsh/_yt-dlp']),
|
||||||
@@ -74,25 +80,26 @@ else:
|
|||||||
('share/doc/yt_dlp', ['README.txt']),
|
('share/doc/yt_dlp', ['README.txt']),
|
||||||
('share/man/man1', ['yt-dlp.1'])
|
('share/man/man1', ['yt-dlp.1'])
|
||||||
]
|
]
|
||||||
root = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
data_files = []
|
data_files = []
|
||||||
for dirname, files in files_spec:
|
for dirname, files in files_spec:
|
||||||
resfiles = []
|
resfiles = []
|
||||||
for fn in files:
|
for fn in files:
|
||||||
if not os.path.exists(fn):
|
if not os.path.exists(fn):
|
||||||
warnings.warn('Skipping file %s since it is not present. Try running `make pypi-files` first' % fn)
|
warnings.warn(f'Skipping file {fn} since it is not present. Try running " make pypi-files " first')
|
||||||
else:
|
else:
|
||||||
resfiles.append(fn)
|
resfiles.append(fn)
|
||||||
data_files.append((dirname, resfiles))
|
data_files.append((dirname, resfiles))
|
||||||
|
|
||||||
params = {
|
params = {'data_files': data_files}
|
||||||
'data_files': data_files,
|
|
||||||
}
|
|
||||||
|
|
||||||
if setuptools_available:
|
if setuptools_available:
|
||||||
params['entry_points'] = {'console_scripts': ['yt-dlp = yt_dlp:main']}
|
params['entry_points'] = {
|
||||||
|
'console_scripts': ['yt-dlp = yt_dlp:main'],
|
||||||
|
'pyinstaller40': ['hook-dirs = yt_dlp.__pyinstaller:get_hook_dirs'],
|
||||||
|
}
|
||||||
else:
|
else:
|
||||||
params['scripts'] = ['yt-dlp']
|
params['scripts'] = ['yt-dlp']
|
||||||
|
return params
|
||||||
|
|
||||||
|
|
||||||
class build_lazy_extractors(Command):
|
class build_lazy_extractors(Command):
|
||||||
@@ -106,52 +113,64 @@ class build_lazy_extractors(Command):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
spawn([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'],
|
if self.dry_run:
|
||||||
dry_run=self.dry_run)
|
print('Skipping build of lazy extractors in dry run mode')
|
||||||
|
return
|
||||||
|
subprocess.run([sys.executable, 'devscripts/make_lazy_extractors.py'])
|
||||||
|
|
||||||
|
|
||||||
if setuptools_available:
|
def main():
|
||||||
packages = find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins'))
|
if sys.argv[1:2] == ['py2exe']:
|
||||||
else:
|
params = py2exe_params()
|
||||||
packages = ['yt_dlp', 'yt_dlp.downloader', 'yt_dlp.extractor', 'yt_dlp.postprocessor']
|
try:
|
||||||
|
from py2exe import freeze
|
||||||
|
except ImportError:
|
||||||
|
import py2exe # noqa: F401
|
||||||
|
warnings.warn('You are using an outdated version of py2exe. Support for this version will be removed in the future')
|
||||||
|
params['console'][0].update(params.pop('version_info'))
|
||||||
|
params['options'] = {'py2exe': params.pop('options')}
|
||||||
|
else:
|
||||||
|
return freeze(**params)
|
||||||
|
else:
|
||||||
|
params = build_params()
|
||||||
|
|
||||||
|
setup(
|
||||||
|
name='yt-dlp',
|
||||||
|
version=VERSION,
|
||||||
|
maintainer='pukkandan',
|
||||||
|
maintainer_email='pukkandan.ytdlp@gmail.com',
|
||||||
|
description=DESCRIPTION,
|
||||||
|
long_description=LONG_DESCRIPTION,
|
||||||
|
long_description_content_type='text/markdown',
|
||||||
|
url='https://github.com/yt-dlp/yt-dlp',
|
||||||
|
packages=packages(),
|
||||||
|
install_requires=REQUIREMENTS,
|
||||||
|
python_requires='>=3.7',
|
||||||
|
project_urls={
|
||||||
|
'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
|
||||||
|
'Source': 'https://github.com/yt-dlp/yt-dlp',
|
||||||
|
'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
|
||||||
|
'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
|
||||||
|
},
|
||||||
|
classifiers=[
|
||||||
|
'Topic :: Multimedia :: Video',
|
||||||
|
'Development Status :: 5 - Production/Stable',
|
||||||
|
'Environment :: Console',
|
||||||
|
'Programming Language :: Python',
|
||||||
|
'Programming Language :: Python :: 3.7',
|
||||||
|
'Programming Language :: Python :: 3.8',
|
||||||
|
'Programming Language :: Python :: 3.9',
|
||||||
|
'Programming Language :: Python :: 3.10',
|
||||||
|
'Programming Language :: Python :: 3.11',
|
||||||
|
'Programming Language :: Python :: Implementation',
|
||||||
|
'Programming Language :: Python :: Implementation :: CPython',
|
||||||
|
'Programming Language :: Python :: Implementation :: PyPy',
|
||||||
|
'License :: Public Domain',
|
||||||
|
'Operating System :: OS Independent',
|
||||||
|
],
|
||||||
|
cmdclass={'build_lazy_extractors': build_lazy_extractors},
|
||||||
|
**params
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
setup(
|
main()
|
||||||
name='yt-dlp',
|
|
||||||
version=VERSION,
|
|
||||||
maintainer='pukkandan',
|
|
||||||
maintainer_email='pukkandan.ytdlp@gmail.com',
|
|
||||||
description=DESCRIPTION,
|
|
||||||
long_description=LONG_DESCRIPTION,
|
|
||||||
long_description_content_type='text/markdown',
|
|
||||||
url='https://github.com/yt-dlp/yt-dlp',
|
|
||||||
packages=packages,
|
|
||||||
install_requires=REQUIREMENTS,
|
|
||||||
project_urls={
|
|
||||||
'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
|
|
||||||
'Source': 'https://github.com/yt-dlp/yt-dlp',
|
|
||||||
'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
|
|
||||||
'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
|
|
||||||
},
|
|
||||||
classifiers=[
|
|
||||||
'Topic :: Multimedia :: Video',
|
|
||||||
'Development Status :: 5 - Production/Stable',
|
|
||||||
'Environment :: Console',
|
|
||||||
'Programming Language :: Python',
|
|
||||||
'Programming Language :: Python :: 3.6',
|
|
||||||
'Programming Language :: Python :: 3.7',
|
|
||||||
'Programming Language :: Python :: 3.8',
|
|
||||||
'Programming Language :: Python :: 3.9',
|
|
||||||
'Programming Language :: Python :: 3.10',
|
|
||||||
'Programming Language :: Python :: 3.11',
|
|
||||||
'Programming Language :: Python :: Implementation',
|
|
||||||
'Programming Language :: Python :: Implementation :: CPython',
|
|
||||||
'Programming Language :: Python :: Implementation :: PyPy',
|
|
||||||
'License :: Public Domain',
|
|
||||||
'Operating System :: OS Independent',
|
|
||||||
],
|
|
||||||
python_requires='>=3.6',
|
|
||||||
|
|
||||||
cmdclass={'build_lazy_extractors': build_lazy_extractors},
|
|
||||||
**params
|
|
||||||
)
|
|
||||||
|
|||||||
(File diff suppressed because it is too large)
 21  test/conftest.py  (new file)
@@ -0,0 +1,21 @@
+import functools
+import inspect
+
+import pytest
+
+from yt_dlp.networking import RequestHandler
+from yt_dlp.networking.common import _REQUEST_HANDLERS
+from yt_dlp.utils._utils import _YDLLogger as FakeLogger
+
+
+@pytest.fixture
+def handler(request):
+    RH_KEY = request.param
+    if inspect.isclass(RH_KEY) and issubclass(RH_KEY, RequestHandler):
+        handler = RH_KEY
+    elif RH_KEY in _REQUEST_HANDLERS:
+        handler = _REQUEST_HANDLERS[RH_KEY]
+    else:
+        pytest.skip(f'{RH_KEY} request handler is not available')
+
+    return functools.partial(handler, logger=FakeLogger)
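Note: this fixture is meant to be indirect-parametrized: tests name the request handlers they need and are skipped wherever a handler is unavailable. A hedged usage sketch ('Urllib' is assumed to be a registered handler key; the test body is illustrative):

import pytest

@pytest.mark.parametrize('handler', ['Urllib'], indirect=True)
def test_example(handler):
    # handler is functools.partial(<RequestHandler subclass>, logger=FakeLogger)
    with handler() as rh:
        ...  # exercise rh; the test is skipped entirely if 'Urllib' is not available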
@@ -92,6 +92,13 @@ def gettestcases(include_onlymatching=False):
         yield from ie.get_testcases(include_onlymatching)
 
 
+def getwebpagetestcases():
+    for ie in yt_dlp.extractor.gen_extractors():
+        for tc in ie.get_webpage_testcases():
+            tc.setdefault('add_ie', []).append('Generic')
+            yield tc
+
+
 md5 = lambda s: hashlib.md5(s.encode()).hexdigest()
 
 
@@ -187,8 +194,8 @@ def sanitize_got_info_dict(got_dict):
         'formats', 'thumbnails', 'subtitles', 'automatic_captions', 'comments', 'entries',
 
         # Auto-generated
-        'autonumber', 'playlist', 'format_index', 'video_ext', 'audio_ext', 'duration_string', 'epoch',
-        'fulltitle', 'extractor', 'extractor_key', 'filepath', 'infojson_filename', 'original_url', 'n_entries',
+        'autonumber', 'playlist', 'format_index', 'video_ext', 'audio_ext', 'duration_string', 'epoch', 'n_entries',
+        'fulltitle', 'extractor', 'extractor_key', 'filename', 'filepath', 'infojson_filename', 'original_url',
 
         # Only live_status needs to be checked
         'is_live', 'was_live',
@@ -215,6 +222,10 @@ def sanitize_got_info_dict(got_dict):
     if test_info_dict.get('display_id') == test_info_dict.get('id'):
         test_info_dict.pop('display_id')
 
+    # Check url for flat entries
+    if got_dict.get('_type', 'video') != 'video' and got_dict.get('url'):
+        test_info_dict['url'] = got_dict['url']
+
     return test_info_dict
 
 
@@ -228,8 +239,9 @@ def expect_info_dict(self, got_dict, expected_dict):
     for key in mandatory_fields:
         self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
     # Check for mandatory fields that are automatically set by YoutubeDL
-    for key in ['webpage_url', 'extractor', 'extractor_key']:
-        self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
+    if got_dict.get('_type', 'video') == 'video':
+        for key in ['webpage_url', 'extractor', 'extractor_key']:
+            self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
 
     test_info_dict = sanitize_got_info_dict(got_dict)
 
@@ -242,19 +254,16 @@ def expect_info_dict(self, got_dict, expected_dict):
             return v.__name__
         else:
             return repr(v)
-    info_dict_str = ''
-    if len(missing_keys) != len(expected_dict):
-        info_dict_str += ''.join(
-            f'    {_repr(k)}: {_repr(v)},\n'
-            for k, v in test_info_dict.items() if k not in missing_keys)
-
-        if info_dict_str:
-            info_dict_str += '\n'
+    info_dict_str = ''.join(
+        f'    {_repr(k)}: {_repr(v)},\n'
+        for k, v in test_info_dict.items() if k not in missing_keys)
+    if info_dict_str:
+        info_dict_str += '\n'
     info_dict_str += ''.join(
         f'    {_repr(k)}: {_repr(test_info_dict[k])},\n'
         for k in missing_keys)
-    write_string(
-        '\n\'info_dict\': {\n' + info_dict_str + '},\n', out=sys.stderr)
+    info_dict_str = '\n\'info_dict\': {\n' + info_dict_str + '},\n'
+    write_string(info_dict_str.replace('\n', '\n        '), out=sys.stderr)
     self.assertFalse(
         missing_keys,
         'Missing keys in test definition: %s' % (
@@ -44,5 +44,6 @@
     "writesubtitles": false,
     "allsubtitles": false,
     "listsubtitles": false,
-    "fixup": "never"
+    "fixup": "never",
+    "allow_playlist_files": false
 }
|||||||
@@ -41,7 +41,9 @@ class InfoExtractorTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
|||||||
|
|
||||||
|
|
||||||
class DummyIE(InfoExtractor):
|
class DummyIE(InfoExtractor):
|
||||||
pass
|
def _sort_formats(self, formats, field_preference=[]):
|
||||||
|
self._downloader.sort_formats(
|
||||||
|
{'formats': formats, '_format_sort_fields': field_preference})
|
||||||
|
|
||||||
|
|
||||||
class TestInfoExtractor(unittest.TestCase):
|
class TestInfoExtractor(unittest.TestCase):
|
||||||
@@ -67,6 +69,7 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
<meta name="og:test1" content='foo > < bar'/>
|
<meta name="og:test1" content='foo > < bar'/>
|
||||||
<meta name="og:test2" content="foo >//< bar"/>
|
<meta name="og:test2" content="foo >//< bar"/>
|
||||||
<meta property=og-test3 content='Ill-formatted opengraph'/>
|
<meta property=og-test3 content='Ill-formatted opengraph'/>
|
||||||
|
<meta property=og:test4 content=unquoted-value/>
|
||||||
'''
|
'''
|
||||||
self.assertEqual(ie._og_search_title(html), 'Foo')
|
self.assertEqual(ie._og_search_title(html), 'Foo')
|
||||||
self.assertEqual(ie._og_search_description(html), 'Some video\'s description ')
|
self.assertEqual(ie._og_search_description(html), 'Some video\'s description ')
|
||||||
@@ -79,6 +82,7 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
self.assertEqual(ie._og_search_property(('test0', 'test1'), html), 'foo > < bar')
|
self.assertEqual(ie._og_search_property(('test0', 'test1'), html), 'foo > < bar')
|
||||||
self.assertRaises(RegexNotFoundError, ie._og_search_property, 'test0', html, None, fatal=True)
|
self.assertRaises(RegexNotFoundError, ie._og_search_property, 'test0', html, None, fatal=True)
|
||||||
self.assertRaises(RegexNotFoundError, ie._og_search_property, ('test0', 'test00'), html, None, fatal=True)
|
self.assertRaises(RegexNotFoundError, ie._og_search_property, ('test0', 'test00'), html, None, fatal=True)
|
||||||
|
self.assertEqual(ie._og_search_property('test4', html), 'unquoted-value')
|
||||||
|
|
||||||
def test_html_search_meta(self):
|
def test_html_search_meta(self):
|
||||||
ie = self.ie
|
ie = self.ie
|
||||||
@@ -913,8 +917,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'acodec': 'mp4a.40.2',
|
'acodec': 'mp4a.40.2',
|
||||||
'video_ext': 'mp4',
|
'video_ext': 'mp4',
|
||||||
'audio_ext': 'none',
|
'audio_ext': 'none',
|
||||||
'vbr': 263.851,
|
|
||||||
'abr': 0,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': '577',
|
'format_id': '577',
|
||||||
'format_index': None,
|
'format_index': None,
|
||||||
@@ -932,8 +934,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'acodec': 'mp4a.40.2',
|
'acodec': 'mp4a.40.2',
|
||||||
'video_ext': 'mp4',
|
'video_ext': 'mp4',
|
||||||
'audio_ext': 'none',
|
'audio_ext': 'none',
|
||||||
'vbr': 577.61,
|
|
||||||
'abr': 0,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': '915',
|
'format_id': '915',
|
||||||
'format_index': None,
|
'format_index': None,
|
||||||
@@ -951,8 +951,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'acodec': 'mp4a.40.2',
|
'acodec': 'mp4a.40.2',
|
||||||
'video_ext': 'mp4',
|
'video_ext': 'mp4',
|
||||||
'audio_ext': 'none',
|
'audio_ext': 'none',
|
||||||
'vbr': 915.905,
|
|
||||||
'abr': 0,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': '1030',
|
'format_id': '1030',
|
||||||
'format_index': None,
|
'format_index': None,
|
||||||
@@ -970,8 +968,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'acodec': 'mp4a.40.2',
|
'acodec': 'mp4a.40.2',
|
||||||
'video_ext': 'mp4',
|
'video_ext': 'mp4',
|
||||||
'audio_ext': 'none',
|
'audio_ext': 'none',
|
||||||
'vbr': 1030.138,
|
|
||||||
'abr': 0,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': '1924',
|
'format_id': '1924',
|
||||||
'format_index': None,
|
'format_index': None,
|
||||||
@@ -989,8 +985,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'acodec': 'mp4a.40.2',
|
'acodec': 'mp4a.40.2',
|
||||||
'video_ext': 'mp4',
|
'video_ext': 'mp4',
|
||||||
'audio_ext': 'none',
|
'audio_ext': 'none',
|
||||||
'vbr': 1924.009,
|
|
||||||
'abr': 0,
|
|
||||||
}],
|
}],
|
||||||
{
|
{
|
||||||
'en': [{
|
'en': [{
|
||||||
@@ -1402,6 +1396,7 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'vcodec': 'none',
|
'vcodec': 'none',
|
||||||
'acodec': 'AACL',
|
'acodec': 'AACL',
|
||||||
'protocol': 'ism',
|
'protocol': 'ism',
|
||||||
|
'audio_channels': 2,
|
||||||
'_download_params': {
|
'_download_params': {
|
||||||
'stream_type': 'audio',
|
'stream_type': 'audio',
|
||||||
'duration': 8880746666,
|
'duration': 8880746666,
|
||||||
@@ -1415,9 +1410,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'bits_per_sample': 16,
|
'bits_per_sample': 16,
|
||||||
'nal_unit_length_field': 4
|
'nal_unit_length_field': 4
|
||||||
},
|
},
|
||||||
'audio_ext': 'isma',
|
|
||||||
'video_ext': 'none',
|
|
||||||
'abr': 128,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': 'video-100',
|
'format_id': 'video-100',
|
||||||
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
||||||
@@ -1441,9 +1433,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'bits_per_sample': 16,
|
'bits_per_sample': 16,
|
||||||
'nal_unit_length_field': 4
|
'nal_unit_length_field': 4
|
||||||
},
|
},
|
||||||
'video_ext': 'ismv',
|
|
||||||
'audio_ext': 'none',
|
|
||||||
'vbr': 100,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': 'video-326',
|
'format_id': 'video-326',
|
||||||
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
||||||
@@ -1467,9 +1456,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'bits_per_sample': 16,
|
'bits_per_sample': 16,
|
||||||
'nal_unit_length_field': 4
|
'nal_unit_length_field': 4
|
||||||
},
|
},
|
||||||
'video_ext': 'ismv',
|
|
||||||
'audio_ext': 'none',
|
|
||||||
'vbr': 326,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': 'video-698',
|
'format_id': 'video-698',
|
||||||
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
||||||
@@ -1493,9 +1479,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'bits_per_sample': 16,
|
'bits_per_sample': 16,
|
||||||
'nal_unit_length_field': 4
|
'nal_unit_length_field': 4
|
||||||
},
|
},
|
||||||
'video_ext': 'ismv',
|
|
||||||
'audio_ext': 'none',
|
|
||||||
'vbr': 698,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': 'video-1493',
|
'format_id': 'video-1493',
|
||||||
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
||||||
@@ -1519,9 +1502,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'bits_per_sample': 16,
|
'bits_per_sample': 16,
|
||||||
'nal_unit_length_field': 4
|
'nal_unit_length_field': 4
|
||||||
},
|
},
|
||||||
'video_ext': 'ismv',
|
|
||||||
'audio_ext': 'none',
|
|
||||||
'vbr': 1493,
|
|
||||||
}, {
|
}, {
|
||||||
'format_id': 'video-4482',
|
'format_id': 'video-4482',
|
||||||
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/Manifest',
|
||||||
@@ -1545,9 +1525,6 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'bits_per_sample': 16,
|
'bits_per_sample': 16,
|
||||||
'nal_unit_length_field': 4
|
'nal_unit_length_field': 4
|
||||||
},
|
},
|
||||||
'video_ext': 'ismv',
|
|
||||||
'audio_ext': 'none',
|
|
||||||
'vbr': 4482,
|
|
||||||
}],
|
}],
|
||||||
{
|
{
|
||||||
'eng': [
|
'eng': [
|
||||||
@@ -1567,6 +1544,265 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
|
(
|
||||||
|
'ec-3_test',
|
||||||
|
'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
[{
|
||||||
|
'format_id': 'audio_deu-127',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'isma',
|
||||||
|
'tbr': 127,
|
||||||
|
'asr': 48000,
|
||||||
|
'vcodec': 'none',
|
||||||
|
'acodec': 'AACL',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'audio_channels': 2,
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'audio',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 0,
|
||||||
|
'height': 0,
|
||||||
|
'fourcc': 'AACL',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '1190',
|
||||||
|
'sampling_rate': 48000,
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'audio_deu_1-224',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'isma',
|
||||||
|
'tbr': 224,
|
||||||
|
'asr': 48000,
|
||||||
|
'vcodec': 'none',
|
||||||
|
'acodec': 'EC-3',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'audio_channels': 6,
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'audio',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 0,
|
||||||
|
'height': 0,
|
||||||
|
'fourcc': 'EC-3',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00',
|
||||||
|
'sampling_rate': 48000,
|
||||||
|
'channels': 6,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-23',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 384,
|
||||||
|
'height': 216,
|
||||||
|
'tbr': 23,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 384,
|
||||||
|
'height': 216,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-403',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 400,
|
||||||
|
'height': 224,
|
||||||
|
'tbr': 403,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 400,
|
||||||
|
'height': 224,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-680',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 640,
|
||||||
|
'height': 360,
|
||||||
|
'tbr': 680,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 640,
|
||||||
|
'height': 360,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-1253',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 640,
|
||||||
|
'height': 360,
|
||||||
|
'tbr': 1253,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'vbr': 1253,
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 640,
|
||||||
|
'height': 360,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-2121',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 768,
|
||||||
|
'height': 432,
|
||||||
|
'tbr': 2121,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 768,
|
||||||
|
'height': 432,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-3275',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 1280,
|
||||||
|
'height': 720,
|
||||||
|
'tbr': 3275,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 1280,
|
||||||
|
'height': 720,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-5300',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 1920,
|
||||||
|
'height': 1080,
|
||||||
|
'tbr': 5300,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 1920,
|
||||||
|
'height': 1080,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}, {
|
||||||
|
'format_id': 'video_deu-8079',
|
||||||
|
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||||
|
'ext': 'ismv',
|
||||||
|
'width': 1920,
|
||||||
|
'height': 1080,
|
||||||
|
'tbr': 8079,
|
||||||
|
'vcodec': 'AVC1',
|
||||||
|
'acodec': 'none',
|
||||||
|
'protocol': 'ism',
|
||||||
|
'language': 'deu',
|
||||||
|
'_download_params': {
|
||||||
|
'stream_type': 'video',
|
||||||
|
'duration': 370000000,
|
||||||
|
'timescale': 10000000,
|
||||||
|
'width': 1920,
|
||||||
|
'height': 1080,
|
||||||
|
'fourcc': 'AVC1',
|
||||||
|
'language': 'deu',
|
||||||
|
'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
|
||||||
|
'channels': 2,
|
||||||
|
'bits_per_sample': 16,
|
||||||
|
'nal_unit_length_field': 4
|
||||||
|
},
|
||||||
|
}],
|
||||||
|
{},
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
|
for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
|
||||||
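Editor's note: the AACL entry added above pins 'codec_private_data': '1190' alongside 'sampling_rate': 48000 and 'channels': 2, and those values are not independent. The hex blob is an MPEG-4 AudioSpecificConfig; the short sketch below (plain Python, no yt-dlp imports; the helper name is ours, not the extractor's) decodes it and recovers the same numbers:

# Decode an MPEG-4 AudioSpecificConfig: 5 bits object type,
# 4 bits sampling-frequency index, 4 bits channel configuration.
SAMPLING_RATES = [96000, 88200, 64000, 48000, 44100, 32000, 24000,
                  22050, 16000, 12000, 11025, 8000, 7350]

def parse_audio_specific_config(hex_data):
    bits = bin(int(hex_data, 16))[2:].zfill(len(hex_data) * 4)
    object_type = int(bits[0:5], 2)       # 2 == AAC LC
    frequency_index = int(bits[5:9], 2)   # 3 == 48000 Hz
    channel_config = int(bits[9:13], 2)   # 2 == stereo
    return object_type, SAMPLING_RATES[frequency_index], channel_config

print(parse_audio_specific_config('1190'))  # (2, 48000, 2)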

test/test_YoutubeDL.py
@@ -10,9 +10,8 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 import copy
 import json
-import urllib.error
 
-from test.helper import FakeYDL, assertRegexpMatches
+from test.helper import FakeYDL, assertRegexpMatches, try_rm
 from yt_dlp import YoutubeDL
 from yt_dlp.compat import compat_os_name
 from yt_dlp.extractor import YoutubeIE
@@ -25,6 +24,7 @@ from yt_dlp.utils import (
     int_or_none,
     match_filter_func,
 )
+from yt_dlp.utils.traversal import traverse_obj
 
 TEST_URL = 'http://localhost/sample.mp4'
 
@@ -68,8 +68,7 @@ class TestFormatSelection(unittest.TestCase):
             {'ext': 'mp4', 'height': 460, 'url': TEST_URL},
         ]
         info_dict = _make_result(formats)
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'webm')
@@ -82,8 +81,7 @@ class TestFormatSelection(unittest.TestCase):
             {'ext': 'mp4', 'height': 1080, 'url': TEST_URL},
         ]
         info_dict['formats'] = formats
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'mp4')
@@ -97,8 +95,7 @@ class TestFormatSelection(unittest.TestCase):
             {'ext': 'flv', 'height': 720, 'url': TEST_URL},
         ]
         info_dict['formats'] = formats
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'mp4')
@@ -110,15 +107,14 @@ class TestFormatSelection(unittest.TestCase):
             {'ext': 'webm', 'height': 720, 'url': TEST_URL},
         ]
         info_dict['formats'] = formats
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['ext'], 'webm')
 
     def test_format_selection(self):
         formats = [
-            {'format_id': '35', 'ext': 'mp4', 'preference': 1, 'url': TEST_URL},
+            {'format_id': '35', 'ext': 'mp4', 'preference': 0, 'url': TEST_URL},
             {'format_id': 'example-with-dashes', 'ext': 'webm', 'preference': 1, 'url': TEST_URL},
             {'format_id': '45', 'ext': 'webm', 'preference': 2, 'url': TEST_URL},
             {'format_id': '47', 'ext': 'webm', 'preference': 3, 'url': TEST_URL},
@@ -186,22 +182,19 @@ class TestFormatSelection(unittest.TestCase):
 
         info_dict = _make_result(formats)
         ydl = YDL({'format': 'best'})
-        ie = YoutubeIE(ydl)
-        ie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(copy.deepcopy(info_dict))
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], 'aac-64')
 
         ydl = YDL({'format': 'mp3'})
-        ie = YoutubeIE(ydl)
-        ie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(copy.deepcopy(info_dict))
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], 'mp3-64')
 
         ydl = YDL({'prefer_free_formats': True})
-        ie = YoutubeIE(ydl)
-        ie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(copy.deepcopy(info_dict))
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], 'ogg-64')
@@ -346,8 +339,7 @@ class TestFormatSelection(unittest.TestCase):
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': 'bestvideo+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], '248+172')
@@ -355,40 +347,35 @@ class TestFormatSelection(unittest.TestCase):
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded = ydl.downloaded_info_dicts[0]
         self.assertEqual(downloaded['format_id'], '38')
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': 'bestvideo/best,bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['137', '141'])
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['137+141', '248+141'])
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['136+141', '247+141'])
 
         info_dict = _make_result(list(formats_order), extractor='youtube')
         ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
-        yie = YoutubeIE(ydl)
-        yie._sort_formats(info_dict['formats'])
+        ydl.sort_formats(info_dict)
         ydl.process_ie_result(info_dict)
         downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
         self.assertEqual(downloaded_ids, ['248+141'])
@@ -396,16 +383,14 @@ class TestFormatSelection(unittest.TestCase):
         for f1, f2 in zip(formats_order, formats_order[1:]):
             info_dict = _make_result([f1, f2], extractor='youtube')
             ydl = YDL({'format': 'best/bestvideo'})
-            yie = YoutubeIE(ydl)
-            yie._sort_formats(info_dict['formats'])
+            ydl.sort_formats(info_dict)
             ydl.process_ie_result(info_dict)
             downloaded = ydl.downloaded_info_dicts[0]
             self.assertEqual(downloaded['format_id'], f1['format_id'])
 
             info_dict = _make_result([f2, f1], extractor='youtube')
             ydl = YDL({'format': 'best/bestvideo'})
-            yie = YoutubeIE(ydl)
-            yie._sort_formats(info_dict['formats'])
+            ydl.sort_formats(info_dict)
             ydl.process_ie_result(info_dict)
             downloaded = ydl.downloaded_info_dicts[0]
             self.assertEqual(downloaded['format_id'], f1['format_id'])
@@ -480,7 +465,7 @@ class TestFormatSelection(unittest.TestCase):
         for f in formats:
             f['url'] = 'http://_/'
             f['ext'] = 'unknown'
-        info_dict = _make_result(formats)
+        info_dict = _make_result(formats, _format_sort_fields=('id', ))
 
         ydl = YDL({'format': 'best[filesize<3000]'})
         ydl.process_ie_result(info_dict)
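Editor's note: every hunk above makes the same mechanical swap — format sorting moves off the extractor (yie._sort_formats(info_dict['formats'])) onto the YoutubeDL instance. A minimal sketch of the new call shape, using yt-dlp's public YoutubeDL class rather than the suite's YDL/FakeYDL helpers (the format dicts here are made up):

from yt_dlp import YoutubeDL

ydl = YoutubeDL({'format': 'best'})
info_dict = {
    'id': 'test', 'title': 'test',
    'formats': [
        {'format_id': 'a', 'ext': 'mp4', 'height': 720, 'url': 'http://localhost/a.mp4'},
        {'format_id': 'b', 'ext': 'webm', 'height': 1080, 'url': 'http://localhost/b.webm'},
    ],
}
# Sorts info_dict['formats'] in place (ascending quality, best last),
# honouring an optional info_dict['_format_sort_fields'] override.
ydl.sort_formats(info_dict)
print([f['format_id'] for f in info_dict['formats']])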
@@ -662,13 +647,17 @@ class TestYoutubeDL(unittest.TestCase):
         'playlist_autonumber': 2,
         '__last_playlist_index': 100,
         'n_entries': 10,
-        'formats': [{'id': 'id 1'}, {'id': 'id 2'}, {'id': 'id 3'}]
+        'formats': [
+            {'id': 'id 1', 'height': 1080, 'width': 1920},
+            {'id': 'id 2', 'height': 720},
+            {'id': 'id 3'}
+        ]
     }
 
     def test_prepare_outtmpl_and_filename(self):
         def test(tmpl, expected, *, info=None, **params):
             params['outtmpl'] = tmpl
-            ydl = YoutubeDL(params)
+            ydl = FakeYDL(params)
             ydl._num_downloads = 1
             self.assertEqual(ydl.validate_outtmpl(tmpl), None)
 
@@ -680,7 +669,7 @@ class TestYoutubeDL(unittest.TestCase):
             for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
                 if callable(expect):
                     self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
-                else:
+                elif expect is not None:
                     self.assertEqual(got, expect, f'Wrong {name} from {tmpl}')
 
         # Side-effects
@@ -695,7 +684,8 @@ class TestYoutubeDL(unittest.TestCase):
         test('%(id)s.%(ext)s', '1234.mp4')
         test('%(duration_string)s', ('27:46:40', '27-46-40'))
         test('%(resolution)s', '1080p')
-        test('%(playlist_index)s', '001')
+        test('%(playlist_index|)s', '001')
+        test('%(playlist_index&{}!)s', '1!')
         test('%(playlist_autonumber)s', '02')
         test('%(autonumber)s', '00001')
         test('%(autonumber+2)03d', '005', autonumber_start=3)
@@ -722,13 +712,14 @@ class TestYoutubeDL(unittest.TestCase):
         test('%(id)s', '-abcd', info={'id': '-abcd'})
         test('%(id)s', '.abcd', info={'id': '.abcd'})
         test('%(id)s', 'ab__cd', info={'id': 'ab__cd'})
-        test('%(id)s', ('ab:cd', 'ab -cd'), info={'id': 'ab:cd'})
+        test('%(id)s', ('ab:cd', 'ab：cd'), info={'id': 'ab:cd'})
         test('%(id.0)s', '-', info={'id': '--'})
 
         # Invalid templates
         self.assertTrue(isinstance(YoutubeDL.validate_outtmpl('%(title)'), ValueError))
         test('%(invalid@tmpl|def)s', 'none', outtmpl_na_placeholder='none')
         test('%(..)s', 'NA')
+        test('%(formats.{id)s', 'NA')
 
         # Entire info_dict
         def expect_same_infodict(out):
@@ -765,20 +756,23 @@ class TestYoutubeDL(unittest.TestCase):
         test('%(ext)c', 'm')
         test('%(id)d %(id)r', "1234 '1234'")
         test('%(id)r %(height)r', "'1234' 1080")
+        test('%(title5)a %(height)a', (R"'\xe1\xe9\xed \U0001d400' 1080", None))
         test('%(ext)s-%(ext|def)d', 'mp4-def')
-        test('%(width|0)04d', '0000')
-        test('a%(width|)d', 'a', outtmpl_na_placeholder='none')
+        test('%(width|0)04d', '0')
+        test('a%(width|b)d', 'ab', outtmpl_na_placeholder='none')
 
         FORMATS = self.outtmpl_info['formats']
-        sanitize = lambda x: x.replace(':', ' -').replace('"', "'").replace('\n', ' ')
 
         # Custom type casting
         test('%(formats.:.id)l', 'id 1, id 2, id 3')
         test('%(formats.:.id)#l', ('id 1\nid 2\nid 3', 'id 1 id 2 id 3'))
         test('%(ext)l', 'mp4')
         test('%(formats.:.id) 18l', '  id 1, id 2, id 3')
-        test('%(formats)j', (json.dumps(FORMATS), sanitize(json.dumps(FORMATS))))
-        test('%(formats)#j', (json.dumps(FORMATS, indent=4), sanitize(json.dumps(FORMATS, indent=4))))
+        test('%(formats)j', (json.dumps(FORMATS), None))
+        test('%(formats)#j', (
+            json.dumps(FORMATS, indent=4),
+            json.dumps(FORMATS, indent=4).replace(':', '：').replace('"', '＂').replace('\n', ' ')
+        ))
         test('%(title5).3B', 'á')
         test('%(title5)U', 'áéí 𝐀')
         test('%(title5)#U', 'a\u0301e\u0301i\u0301 𝐀')
@@ -788,13 +782,13 @@ class TestYoutubeDL(unittest.TestCase):
         test('%(filesize)#D', '1Ki')
         test('%(height)5.2D', ' 1.08k')
         test('%(title4)#S', 'foo_bar_test')
-        test('%(title4).10S', ('foo \'bar\' ', 'foo \'bar\'' + ('#' if compat_os_name == 'nt' else ' ')))
+        test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if compat_os_name == 'nt' else ' ')))
         if compat_os_name == 'nt':
-            test('%(title4)q', ('"foo \\"bar\\" test"', "'foo _'bar_' test'"))
-            test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', "'id 1' 'id 2' 'id 3'"))
-            test('%(formats.0.id)#q', ('"id 1"', "'id 1'"))
+            test('%(title4)q', ('"foo ""bar"" test"', None))
+            test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', None))
+            test('%(formats.0.id)#q', ('"id 1"', None))
         else:
-            test('%(title4)q', ('\'foo "bar" test\'', "'foo 'bar' test'"))
+            test('%(title4)q', ('\'foo "bar" test\'', '\'foo "bar" test\''))
             test('%(formats.:.id)#q', "'id 1' 'id 2' 'id 3'")
             test('%(formats.0.id)#q', "'id 1'")
 
@@ -803,8 +797,8 @@ class TestYoutubeDL(unittest.TestCase):
         test('%(title|%)s %(title|%%)s', '% %%')
         test('%(id+1-height+3)05d', '00158')
         test('%(width+100)05d', 'NA')
-        test('%(formats.0) 15s', ('% 15s' % FORMATS[0], '% 15s' % sanitize(str(FORMATS[0]))))
-        test('%(formats.0)r', (repr(FORMATS[0]), sanitize(repr(FORMATS[0]))))
+        test('%(formats.0) 15s', ('% 15s' % FORMATS[0], None))
+        test('%(formats.0)r', (repr(FORMATS[0]), None))
         test('%(height.0)03d', '001')
         test('%(-height.0)04d', '-001')
         test('%(formats.-1.id)s', FORMATS[-1]['id'])
@@ -813,6 +807,12 @@ class TestYoutubeDL(unittest.TestCase):
         test('%(formats.:2:-1)r', repr(FORMATS[:2:-1]))
         test('%(formats.0.id.-1+id)f', '1235.000000')
         test('%(formats.0.id.-1+formats.1.id.-1)d', '3')
+        out = json.dumps([{'id': f['id'], 'height.:2': str(f['height'])[:2]}
+                          if 'height' in f else {'id': f['id']}
+                          for f in FORMATS])
+        test('%(formats.:.{id,height.:2})j', (out, None))
+        test('%(formats.:.{id,height}.id)l', ', '.join(f['id'] for f in FORMATS))
+        test('%(.{id,title})j', ('{"id": "1234"}', '{"id": "1234"}'))
 
         # Alternates
         test('%(title,id)s', '1234')
@@ -826,6 +826,11 @@ class TestYoutubeDL(unittest.TestCase):
         test('%(title&foo|baz)s.bar', 'baz.bar')
         test('%(x,id&foo|baz)s.bar', 'foo.bar')
         test('%(x,title&foo|baz)s.bar', 'baz.bar')
+        test('%(id&a\nb|)s', ('a\nb', 'a b'))
+        test('%(id&hi {:>10} {}|)s', 'hi       1234 1234')
+        test(R'%(id&{0} {}|)s', 'NA')
+        test(R'%(id&{0.1}|)s', 'NA')
+        test('%(height&{:,d})S', '1,080')
 
         # Laziness
         def gen():
@@ -852,8 +857,8 @@ class TestYoutubeDL(unittest.TestCase):
         # Path expansion and escaping
         test('Hello %(title1)s', 'Hello $PATH')
         test('Hello %(title2)s', 'Hello %PATH%')
-        test('%(title3)s', ('foo/bar\\test', 'foo_bar_test'))
-        test('folder/%(title3)s', ('folder/foo/bar\\test', 'folder%sfoo_bar_test' % os.path.sep))
+        test('%(title3)s', ('foo/bar\\test', 'foo⧸bar⧹test'))
+        test('folder/%(title3)s', ('folder/foo/bar\\test', 'folder%sfoo⧸bar⧹test' % os.path.sep))
 
     def test_format_note(self):
         ydl = YoutubeDL()
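Editor's note: the outtmpl hunks above are easier to follow with the template mini-language in hand: %(field)s supports dot-path traversal and slicing, | for default values, & for replacement strings with {} interpolation, and conversions such as l (comma-joined list) and j (JSON). A small sketch against YoutubeDL.evaluate_outtmpl, the method the test() helper ultimately exercises (the info dict here is invented):

from yt_dlp import YoutubeDL

ydl = YoutubeDL()
info = {
    'id': '1234',
    'height': 1080,
    'formats': [{'id': 'id 1'}, {'id': 'id 2'}, {'id': 'id 3'}],
}
print(ydl.evaluate_outtmpl('%(formats.:.id)l', info))     # 'id 1, id 2, id 3'
print(ydl.evaluate_outtmpl('%(id+1-height+3)05d', info))  # '00158' (1234 + 1 - 1080 + 3)
print(ydl.evaluate_outtmpl('%(id&hi {}|none)s', info))    # 'hi 1234'
print(ydl.evaluate_outtmpl('%(title|fallback)s', info))   # 'fallback' (no title field)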
@@ -871,12 +876,12 @@ class TestYoutubeDL(unittest.TestCase):
 
         class SimplePP(PostProcessor):
             def run(self, info):
-                with open(audiofile, 'wt') as f:
+                with open(audiofile, 'w') as f:
                     f.write('EXAMPLE')
                 return [info['filepath']], info
 
         def run_pp(params, PP):
-            with open(filename, 'wt') as f:
+            with open(filename, 'w') as f:
                 f.write('EXAMPLE')
             ydl = YoutubeDL(params)
             ydl.add_post_processor(PP())
@@ -895,7 +900,7 @@ class TestYoutubeDL(unittest.TestCase):
 
         class ModifierPP(PostProcessor):
             def run(self, info):
-                with open(info['filepath'], 'wt') as f:
+                with open(info['filepath'], 'w') as f:
                     f.write('MODIFIED')
                 return [], info
 
@@ -1053,6 +1058,7 @@ class TestYoutubeDL(unittest.TestCase):
                        for v in get_downloaded_info_dicts(params, entries)]
             self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
             self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
 
         test_selection({}, INDICES)
         test_selection({'playlistend': 20}, INDICES, True)
         test_selection({'playlistend': 2}, INDICES[:2])
@@ -1096,11 +1102,6 @@ class TestYoutubeDL(unittest.TestCase):
         test_selection({'playlist_items': '-15::2'}, INDICES[1::2], True)
         test_selection({'playlist_items': '-15::15'}, [], True)
 
-    def test_urlopen_no_file_protocol(self):
-        # see https://github.com/ytdl-org/youtube-dl/issues/8227
-        ydl = YDL()
-        self.assertRaises(urllib.error.URLError, ydl.urlopen, 'file:///etc/passwd')
-
     def test_do_not_override_ie_key_in_url_transparent(self):
         ydl = YDL()
 
@@ -1214,6 +1215,129 @@ class TestYoutubeDL(unittest.TestCase):
         self.assertEqual(downloaded['extractor'], 'Video')
         self.assertEqual(downloaded['extractor_key'], 'Video')
 
+    def test_header_cookies(self):
+        from http.cookiejar import Cookie
+
+        ydl = FakeYDL()
+        ydl.report_warning = lambda *_, **__: None
+
+        def cookie(name, value, version=None, domain='', path='', secure=False, expires=None):
+            return Cookie(
+                version or 0, name, value, None, False,
+                domain, bool(domain), bool(domain), path, bool(path),
+                secure, expires, False, None, None, rest={})
+
+        _test_url = 'https://yt.dlp/test'
+
+        def test(encoded_cookies, cookies, *, headers=False, round_trip=None, error_re=None):
+            def _test():
+                ydl.cookiejar.clear()
+                ydl._load_cookies(encoded_cookies, autoscope=headers)
+                if headers:
+                    ydl._apply_header_cookies(_test_url)
+                data = {'url': _test_url}
+                ydl._calc_headers(data)
+                self.assertCountEqual(
+                    map(vars, ydl.cookiejar), map(vars, cookies),
+                    'Extracted cookiejar.Cookie is not the same')
+                if not headers:
+                    self.assertEqual(
+                        data.get('cookies'), round_trip or encoded_cookies,
+                        'Cookie is not the same as round trip')
+                ydl.__dict__['_YoutubeDL__header_cookies'] = []
+
+            with self.subTest(msg=encoded_cookies):
+                if not error_re:
+                    _test()
+                    return
+                with self.assertRaisesRegex(Exception, error_re):
+                    _test()
+
+        test('test=value; Domain=.yt.dlp', [cookie('test', 'value', domain='.yt.dlp')])
+        test('test=value', [cookie('test', 'value')], error_re=r'Unscoped cookies are not allowed')
+        test('cookie1=value1; Domain=.yt.dlp; Path=/test; cookie2=value2; Domain=.yt.dlp; Path=/', [
+            cookie('cookie1', 'value1', domain='.yt.dlp', path='/test'),
+            cookie('cookie2', 'value2', domain='.yt.dlp', path='/')])
+        test('test=value; Domain=.yt.dlp; Path=/test; Secure; Expires=9999999999', [
+            cookie('test', 'value', domain='.yt.dlp', path='/test', secure=True, expires=9999999999)])
+        test('test="value; "; path=/test; domain=.yt.dlp', [
+            cookie('test', 'value; ', domain='.yt.dlp', path='/test')],
+            round_trip='test="value\\073 "; Domain=.yt.dlp; Path=/test')
+        test('name=; Domain=.yt.dlp', [cookie('name', '', domain='.yt.dlp')],
+             round_trip='name=""; Domain=.yt.dlp')
+
+        test('test=value', [cookie('test', 'value', domain='.yt.dlp')], headers=True)
+        test('cookie1=value; Domain=.yt.dlp; cookie2=value', [], headers=True, error_re=r'Invalid syntax')
+        ydl.deprecated_feature = ydl.report_error
+        test('test=value', [], headers=True, error_re=r'Passing cookies as a header is a potential security risk')
+
+    def test_infojson_cookies(self):
+        TEST_FILE = 'test_infojson_cookies.info.json'
+        TEST_URL = 'https://example.com/example.mp4'
+        COOKIES = 'a=b; Domain=.example.com; c=d; Domain=.example.com'
+        COOKIE_HEADER = {'Cookie': 'a=b; c=d'}
+
+        ydl = FakeYDL()
+        ydl.process_info = lambda x: ydl._write_info_json('test', x, TEST_FILE)
+
+        def make_info(info_header_cookies=False, fmts_header_cookies=False, cookies_field=False):
+            fmt = {'url': TEST_URL}
+            if fmts_header_cookies:
+                fmt['http_headers'] = COOKIE_HEADER
+            if cookies_field:
+                fmt['cookies'] = COOKIES
+            return _make_result([fmt], http_headers=COOKIE_HEADER if info_header_cookies else None)
+
+        def test(initial_info, note):
+            result = {}
+            result['processed'] = ydl.process_ie_result(initial_info)
+            self.assertTrue(ydl.cookiejar.get_cookies_for_url(TEST_URL),
+                            msg=f'No cookies set in cookiejar after initial process when {note}')
+            ydl.cookiejar.clear()
+            with open(TEST_FILE) as infojson:
+                result['loaded'] = ydl.sanitize_info(json.load(infojson), True)
+            result['final'] = ydl.process_ie_result(result['loaded'].copy(), download=False)
+            self.assertTrue(ydl.cookiejar.get_cookies_for_url(TEST_URL),
+                            msg=f'No cookies set in cookiejar after final process when {note}')
+            ydl.cookiejar.clear()
+            for key in ('processed', 'loaded', 'final'):
+                info = result[key]
+                self.assertIsNone(
+                    traverse_obj(info, ((None, ('formats', 0)), 'http_headers', 'Cookie'), casesense=False, get_all=False),
+                    msg=f'Cookie header not removed in {key} result when {note}')
+                self.assertEqual(
+                    traverse_obj(info, ((None, ('formats', 0)), 'cookies'), get_all=False), COOKIES,
+                    msg=f'No cookies field found in {key} result when {note}')
+
+        test({'url': TEST_URL, 'http_headers': COOKIE_HEADER, 'id': '1', 'title': 'x'}, 'no formats field')
+        test(make_info(info_header_cookies=True), 'info_dict header cookies')
+        test(make_info(fmts_header_cookies=True), 'format header cookies')
+        test(make_info(info_header_cookies=True, fmts_header_cookies=True), 'info_dict and format header cookies')
+        test(make_info(info_header_cookies=True, fmts_header_cookies=True, cookies_field=True), 'all cookies fields')
+        test(make_info(cookies_field=True), 'cookies format field')
+        test({'url': TEST_URL, 'cookies': COOKIES, 'id': '1', 'title': 'x'}, 'info_dict cookies field only')
+
+        try_rm(TEST_FILE)
+
+    def test_add_headers_cookie(self):
+        def check_for_cookie_header(result):
+            return traverse_obj(result, ((None, ('formats', 0)), 'http_headers', 'Cookie'), casesense=False, get_all=False)
+
+        ydl = FakeYDL({'http_headers': {'Cookie': 'a=b'}})
+        ydl._apply_header_cookies(_make_result([])['webpage_url'])  # Scope to input webpage URL: .example.com
+
+        fmt = {'url': 'https://example.com/video.mp4'}
+        result = ydl.process_ie_result(_make_result([fmt]), download=False)
+        self.assertIsNone(check_for_cookie_header(result), msg='http_headers cookies in result info_dict')
+        self.assertEqual(result.get('cookies'), 'a=b; Domain=.example.com', msg='No cookies were set in cookies field')
+        self.assertIn('a=b', ydl.cookiejar.get_cookie_header(fmt['url']), msg='No cookies were set in cookiejar')
+
+        fmt = {'url': 'https://wrong.com/video.mp4'}
+        result = ydl.process_ie_result(_make_result([fmt]), download=False)
+        self.assertIsNone(check_for_cookie_header(result), msg='http_headers cookies for wrong domain')
+        self.assertFalse(result.get('cookies'), msg='Cookies set in cookies field for wrong domain')
+        self.assertFalse(ydl.cookiejar.get_cookie_header(fmt['url']), msg='Cookies set in cookiejar for wrong domain')
+
 
 if __name__ == '__main__':
     unittest.main()
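Editor's note: the test_header_cookies block above leans on the stdlib http.cookiejar.Cookie constructor, whose sixteen positional arguments are easy to misread. A standalone sketch of the same construction (the make_cookie helper is ours, not yt-dlp's), showing why the tests insist on a Domain attribute before a header cookie can be scoped to a host:

from http.cookiejar import Cookie, CookieJar

def make_cookie(name, value, domain='', path='', secure=False, expires=None):
    # Positional order: version, name, value, port, port_specified, domain,
    # domain_specified, domain_initial_dot, path, path_specified, secure,
    # expires, discard, comment, comment_url, rest
    return Cookie(
        0, name, value, None, False,
        domain, bool(domain), domain.startswith('.'), path, bool(path),
        secure, expires, False, None, None, {})

jar = CookieJar()
# Without a domain, the jar cannot decide which requests should carry the
# cookie -- hence the 'Unscoped cookies are not allowed' error in the test.
jar.set_cookie(make_cookie('test', 'value', domain='.yt.dlp', path='/'))
print([f'{c.domain}{c.path}: {c.name}={c.value}' for c in jar])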

test/test_YoutubeDLCookieJar.py
@@ -11,16 +11,16 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import re
 import tempfile
 
-from yt_dlp.utils import YoutubeDLCookieJar
+from yt_dlp.cookies import YoutubeDLCookieJar
 
 
 class TestYoutubeDLCookieJar(unittest.TestCase):
     def test_keep_session_cookies(self):
         cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/session_cookies.txt')
-        cookiejar.load(ignore_discard=True, ignore_expires=True)
+        cookiejar.load()
         tf = tempfile.NamedTemporaryFile(delete=False)
         try:
-            cookiejar.save(filename=tf.name, ignore_discard=True, ignore_expires=True)
+            cookiejar.save(filename=tf.name)
             temp = tf.read().decode()
             self.assertTrue(re.search(
                 r'www\.foobar\.foobar\s+FALSE\s+/\s+TRUE\s+0\s+YoutubeDLExpiresEmpty\s+YoutubeDLExpiresEmptyValue', temp))
@@ -32,7 +32,7 @@ class TestYoutubeDLCookieJar(unittest.TestCase):
 
     def test_strip_httponly_prefix(self):
         cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/httponly_cookies.txt')
-        cookiejar.load(ignore_discard=True, ignore_expires=True)
+        cookiejar.load()
 
         def assert_cookie_has_value(key):
             self.assertEqual(cookiejar._cookies['www.foobar.foobar']['/'][key].value, key + '_VALUE')
@@ -42,11 +42,25 @@ class TestYoutubeDLCookieJar(unittest.TestCase):
 
     def test_malformed_cookies(self):
         cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/malformed_cookies.txt')
-        cookiejar.load(ignore_discard=True, ignore_expires=True)
+        cookiejar.load()
         # Cookies should be empty since all malformed cookie file entries
         # will be ignored
         self.assertFalse(cookiejar._cookies)
 
+    def test_get_cookie_header(self):
+        cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/httponly_cookies.txt')
+        cookiejar.load()
+        header = cookiejar.get_cookie_header('https://www.foobar.foobar')
+        self.assertIn('HTTPONLY_COOKIE', header)
+
+    def test_get_cookies_for_url(self):
+        cookiejar = YoutubeDLCookieJar('./test/testdata/cookies/session_cookies.txt')
+        cookiejar.load()
+        cookies = cookiejar.get_cookies_for_url('https://www.foobar.foobar/')
+        self.assertEqual(len(cookies), 2)
+        cookies = cookiejar.get_cookies_for_url('https://foobar.foobar/')
+        self.assertFalse(cookies)
+
 
 if __name__ == '__main__':
     unittest.main()
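Editor's note: the two new tests cover YoutubeDLCookieJar.get_cookie_header() and get_cookies_for_url(), both visible in the hunk above. A short usage sketch against the same Netscape-format test fixtures (paths assume the yt-dlp checkout root as working directory):

from yt_dlp.cookies import YoutubeDLCookieJar

jar = YoutubeDLCookieJar('./test/testdata/cookies/session_cookies.txt')
jar.load()

# A ready-made 'name1=value1; name2=value2' string for a Cookie request header
print(jar.get_cookie_header('https://www.foobar.foobar/'))

# ... or the matching http.cookiejar.Cookie objects themselves
for c in jar.get_cookies_for_url('https://www.foobar.foobar/'):
    print(c.name, c.value, c.domain)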

test/test_aes.py
@@ -11,7 +11,6 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import base64
 
 from yt_dlp.aes import (
-    BLOCK_SIZE_BYTES,
     aes_cbc_decrypt,
     aes_cbc_decrypt_bytes,
     aes_cbc_encrypt,
@@ -24,8 +23,10 @@ from yt_dlp.aes import (
     aes_encrypt,
     aes_gcm_decrypt_and_verify,
     aes_gcm_decrypt_and_verify_bytes,
+    key_expansion,
+    pad_block,
 )
-from yt_dlp.dependencies import Cryptodome_AES
+from yt_dlp.dependencies import Cryptodome
 from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
 
 # the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'
@@ -47,7 +48,7 @@ class TestAES(unittest.TestCase):
         data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
         decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
-        if Cryptodome_AES:
+        if Cryptodome.AES:
             decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
             self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
@@ -77,7 +78,7 @@ class TestAES(unittest.TestCase):
         decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
             bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
-        if Cryptodome_AES:
+        if Cryptodome.AES:
             decrypted = aes_gcm_decrypt_and_verify_bytes(
                 data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
             self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
@@ -101,8 +102,7 @@ class TestAES(unittest.TestCase):
 
     def test_ecb_encrypt(self):
         data = bytes_to_intlist(self.secret_msg)
-        data += [0x08] * (BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES)
-        encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key, self.iv))
+        encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
         self.assertEqual(
             encrypted,
             b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
@@ -112,6 +112,41 @@ class TestAES(unittest.TestCase):
         decrypted = intlist_to_bytes(aes_ecb_decrypt(data, self.key, self.iv))
         self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
 
+    def test_key_expansion(self):
+        key = '4f6bdaa39e2f8cb07f5e722d9edef314'
+
+        self.assertEqual(key_expansion(bytes_to_intlist(bytearray.fromhex(key))), [
+            0x4F, 0x6B, 0xDA, 0xA3, 0x9E, 0x2F, 0x8C, 0xB0, 0x7F, 0x5E, 0x72, 0x2D, 0x9E, 0xDE, 0xF3, 0x14,
+            0x53, 0x66, 0x20, 0xA8, 0xCD, 0x49, 0xAC, 0x18, 0xB2, 0x17, 0xDE, 0x35, 0x2C, 0xC9, 0x2D, 0x21,
+            0x8C, 0xBE, 0xDD, 0xD9, 0x41, 0xF7, 0x71, 0xC1, 0xF3, 0xE0, 0xAF, 0xF4, 0xDF, 0x29, 0x82, 0xD5,
+            0x2D, 0xAD, 0xDE, 0x47, 0x6C, 0x5A, 0xAF, 0x86, 0x9F, 0xBA, 0x00, 0x72, 0x40, 0x93, 0x82, 0xA7,
+            0xF9, 0xBE, 0x82, 0x4E, 0x95, 0xE4, 0x2D, 0xC8, 0x0A, 0x5E, 0x2D, 0xBA, 0x4A, 0xCD, 0xAF, 0x1D,
+            0x54, 0xC7, 0x26, 0x98, 0xC1, 0x23, 0x0B, 0x50, 0xCB, 0x7D, 0x26, 0xEA, 0x81, 0xB0, 0x89, 0xF7,
+            0x93, 0x60, 0x4E, 0x94, 0x52, 0x43, 0x45, 0xC4, 0x99, 0x3E, 0x63, 0x2E, 0x18, 0x8E, 0xEA, 0xD9,
+            0xCA, 0xE7, 0x7B, 0x39, 0x98, 0xA4, 0x3E, 0xFD, 0x01, 0x9A, 0x5D, 0xD3, 0x19, 0x14, 0xB7, 0x0A,
+            0xB0, 0x4E, 0x1C, 0xED, 0x28, 0xEA, 0x22, 0x10, 0x29, 0x70, 0x7F, 0xC3, 0x30, 0x64, 0xC8, 0xC9,
+            0xE8, 0xA6, 0xC1, 0xE9, 0xC0, 0x4C, 0xE3, 0xF9, 0xE9, 0x3C, 0x9C, 0x3A, 0xD9, 0x58, 0x54, 0xF3,
+            0xB4, 0x86, 0xCC, 0xDC, 0x74, 0xCA, 0x2F, 0x25, 0x9D, 0xF6, 0xB3, 0x1F, 0x44, 0xAE, 0xE7, 0xEC])
+
+    def test_pad_block(self):
+        block = [0x21, 0xA0, 0x43, 0xFF]
+
+        self.assertEqual(pad_block(block, 'pkcs7'),
+                         block + [0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C])
+
+        self.assertEqual(pad_block(block, 'iso7816'),
+                         block + [0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
+
+        self.assertEqual(pad_block(block, 'whitespace'),
+                         block + [0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20])
+
+        self.assertEqual(pad_block(block, 'zero'),
+                         block + [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
+
+        block = list(range(16))
+        for mode in ('pkcs7', 'iso7816', 'whitespace', 'zero'):
+            self.assertEqual(pad_block(block, mode), block, mode)
+
 
 if __name__ == '__main__':
     unittest.main()
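Editor's note: test_pad_block above fixes the padding rules by example. As a cross-check, here is a self-contained re-implementation of those rules for the 16-byte AES block size (our own sketch, not the yt_dlp.aes source); note that, per the final loop in the test, an already-aligned block is returned unpadded:

BLOCK_SIZE_BYTES = 16

def pad_block(block, padding_mode):
    padding_size = BLOCK_SIZE_BYTES - len(block) % BLOCK_SIZE_BYTES
    if padding_size == BLOCK_SIZE_BYTES:   # already aligned: no padding added
        return list(block)
    if padding_mode == 'pkcs7':            # every pad byte equals the pad length
        return block + [padding_size] * padding_size
    if padding_mode == 'iso7816':          # 0x80 marker, then zero fill
        return block + [0x80] + [0x00] * (padding_size - 1)
    if padding_mode == 'whitespace':       # ASCII space fill
        return block + [0x20] * padding_size
    if padding_mode == 'zero':             # zero fill
        return block + [0x00] * padding_size
    raise NotImplementedError(f'{padding_mode} not implemented')

# A 4-byte block needs 12 pad bytes; pkcs7 therefore pads with 0x0C.
assert pad_block([0x21, 0xA0, 0x43, 0xFF], 'pkcs7')[4:] == [0x0C] * 12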

test/test_age_restriction.py
@@ -10,6 +10,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 from test.helper import is_download_test, try_rm
 from yt_dlp import YoutubeDL
+from yt_dlp.utils import DownloadError
 
 
 def _download_restricted(url, filename, age):
@@ -25,10 +26,14 @@ def _download_restricted(url, filename, age):
     ydl.add_default_info_extractors()
     json_filename = os.path.splitext(filename)[0] + '.info.json'
     try_rm(json_filename)
-    ydl.download([url])
-    res = os.path.exists(json_filename)
-    try_rm(json_filename)
-    return res
+    try:
+        ydl.download([url])
+    except DownloadError:
+        pass
+    else:
+        return os.path.exists(json_filename)
+    finally:
+        try_rm(json_filename)
 
 
 @is_download_test
@@ -38,12 +43,12 @@ class TestAgeRestriction(unittest.TestCase):
         self.assertFalse(_download_restricted(url, filename, age))
 
     def test_youtube(self):
-        self._assert_restricted('07FYdnEawAQ', '07FYdnEawAQ.mp4', 10)
+        self._assert_restricted('HtVdAasjOgU', 'HtVdAasjOgU.mp4', 10)
 
     def test_youporn(self):
         self._assert_restricted(
-            'http://www.youporn.com/watch/505835/sex-ed-is-it-safe-to-masturbate-daily/',
-            '505835.mp4', 2, old_age=25)
+            'https://www.youporn.com/watch/16715086/sex-ed-in-detention-18-asmr/',
+            '16715086.mp4', 2, old_age=25)
 
 
 if __name__ == '__main__':
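Editor's note: the rewritten _download_restricted relies on Python's full try/except/else/finally ladder, which is worth spelling out: else runs only when the try body did not raise, and finally runs even around the early return from else. Schematically (a generic sketch, not yt-dlp code; RuntimeError stands in for DownloadError):

def attempt(action, check, cleanup):
    try:
        action()          # may raise
    except RuntimeError:  # swallow the expected failure -> returns None
        pass
    else:
        return check()    # runs only if action() did not raise
    finally:
        cleanup()         # always runs, even before the early return above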

test/test_compat.py
@@ -9,15 +9,16 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
 import struct
-import urllib.parse
 
 from yt_dlp import compat
+from yt_dlp.compat import urllib  # isort: split
 from yt_dlp.compat import (
     compat_etree_fromstring,
     compat_expanduser,
     compat_urllib_parse_unquote,
     compat_urllib_parse_urlencode,
 )
+from yt_dlp.compat.urllib.request import getproxies
 
 
 class TestCompat(unittest.TestCase):
@@ -28,7 +29,10 @@ class TestCompat(unittest.TestCase):
         with self.assertWarns(DeprecationWarning):
             compat.WINDOWS_VT_MODE
 
-        compat.asyncio.events  # Must not raise error
+        self.assertEqual(urllib.request.getproxies, getproxies)
+
+        with self.assertWarns(DeprecationWarning):
+            compat.compat_pycrypto_AES  # Must not raise error
 
     def test_compat_expanduser(self):
         old_home = os.environ.get('HOME')
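Editor's note: the assertWarns checks above work because yt_dlp.compat serves deprecated names through a module-level __getattr__ hook (PEP 562). A toy version of that pattern (the names here are illustrative, not yt-dlp's actual shim):

import warnings

_DEPRECATED = {'compat_pycrypto_AES': None}  # deprecated name -> replacement value

def __getattr__(name):
    # Called only when normal module attribute lookup fails
    if name in _DEPRECATED:
        warnings.warn(f'{name} is deprecated', DeprecationWarning, stacklevel=2)
        return _DEPRECATED[name]
    raise AttributeError(f'module {__name__!r} has no attribute {name!r}')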

test/test_config.py (new file, 227 lines)
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+import unittest
+import unittest.mock
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import contextlib
+import itertools
+from pathlib import Path
+
+from yt_dlp.compat import compat_expanduser
+from yt_dlp.options import create_parser, parseOpts
+from yt_dlp.utils import Config, get_executable_path
+
+ENVIRON_DEFAULTS = {
+    'HOME': None,
+    'XDG_CONFIG_HOME': '/_xdg_config_home/',
+    'USERPROFILE': 'C:/Users/testing/',
+    'APPDATA': 'C:/Users/testing/AppData/Roaming/',
+    'HOMEDRIVE': 'C:/',
+    'HOMEPATH': 'Users/testing/',
+}
+
+
+@contextlib.contextmanager
+def set_environ(**kwargs):
+    saved_environ = os.environ.copy()
+
+    for name, value in {**ENVIRON_DEFAULTS, **kwargs}.items():
+        if value is None:
+            os.environ.pop(name, None)
+        else:
+            os.environ[name] = value
+
+    yield
+
+    os.environ.clear()
+    os.environ.update(saved_environ)
+
+
+def _generate_expected_groups():
+    xdg_config_home = os.getenv('XDG_CONFIG_HOME') or compat_expanduser('~/.config')
+    appdata_dir = os.getenv('appdata')
+    home_dir = compat_expanduser('~')
+    return {
+        'Portable': [
+            Path(get_executable_path(), 'yt-dlp.conf'),
+        ],
+        'Home': [
+            Path('yt-dlp.conf'),
+        ],
+        'User': [
+            Path(xdg_config_home, 'yt-dlp.conf'),
+            Path(xdg_config_home, 'yt-dlp', 'config'),
+            Path(xdg_config_home, 'yt-dlp', 'config.txt'),
+            *((
+                Path(appdata_dir, 'yt-dlp.conf'),
+                Path(appdata_dir, 'yt-dlp', 'config'),
+                Path(appdata_dir, 'yt-dlp', 'config.txt'),
+            ) if appdata_dir else ()),
+            Path(home_dir, 'yt-dlp.conf'),
+            Path(home_dir, 'yt-dlp.conf.txt'),
+            Path(home_dir, '.yt-dlp', 'config'),
+            Path(home_dir, '.yt-dlp', 'config.txt'),
+        ],
+        'System': [
+            Path('/etc/yt-dlp.conf'),
+            Path('/etc/yt-dlp/config'),
+            Path('/etc/yt-dlp/config.txt'),
+        ]
+    }
+
+
+class TestConfig(unittest.TestCase):
+    maxDiff = None
+
+    @set_environ()
+    def test_config__ENVIRON_DEFAULTS_sanity(self):
+        expected = make_expected()
+        self.assertCountEqual(
+            set(expected), expected,
+            'ENVIRON_DEFAULTS produces non unique names')
+
+    def test_config_all_environ_values(self):
+        for name, value in ENVIRON_DEFAULTS.items():
+            for new_value in (None, '', '.', value or '/some/dir'):
+                with set_environ(**{name: new_value}):
+                    self._simple_grouping_test()
+
+    def test_config_default_expected_locations(self):
+        files, _ = self._simple_config_test()
+        self.assertEqual(
+            files, make_expected(),
+            'Not all expected locations have been checked')
+
+    def test_config_default_grouping(self):
+        self._simple_grouping_test()
+
+    def _simple_grouping_test(self):
+        expected_groups = make_expected_groups()
+        for name, group in expected_groups.items():
+            for index, existing_path in enumerate(group):
+                result, opts = self._simple_config_test(existing_path)
|
||||||
|
expected = expected_from_expected_groups(expected_groups, existing_path)
|
||||||
|
self.assertEqual(
|
||||||
|
result, expected,
|
||||||
|
f'The checked locations do not match the expected ({name}, {index})')
|
||||||
|
self.assertEqual(
|
||||||
|
opts.outtmpl['default'], '1',
|
||||||
|
f'The used result value was incorrect ({name}, {index})')
|
||||||
|
|
||||||
|
def _simple_config_test(self, *stop_paths):
|
||||||
|
encountered = 0
|
||||||
|
paths = []
|
||||||
|
|
||||||
|
def read_file(filename, default=[]):
|
||||||
|
nonlocal encountered
|
||||||
|
path = Path(filename)
|
||||||
|
paths.append(path)
|
||||||
|
if path in stop_paths:
|
||||||
|
encountered += 1
|
||||||
|
return ['-o', f'{encountered}']
|
||||||
|
|
||||||
|
with ConfigMock(read_file):
|
||||||
|
_, opts, _ = parseOpts([], False)
|
||||||
|
|
||||||
|
return paths, opts
|
||||||
|
|
||||||
|
@set_environ()
|
||||||
|
def test_config_early_exit_commandline(self):
|
||||||
|
self._early_exit_test(0, '--ignore-config')
|
||||||
|
|
||||||
|
@set_environ()
|
||||||
|
def test_config_early_exit_files(self):
|
||||||
|
for index, _ in enumerate(make_expected(), 1):
|
||||||
|
self._early_exit_test(index)
|
||||||
|
|
||||||
|
def _early_exit_test(self, allowed_reads, *args):
|
||||||
|
reads = 0
|
||||||
|
|
||||||
|
def read_file(filename, default=[]):
|
||||||
|
nonlocal reads
|
||||||
|
reads += 1
|
||||||
|
|
||||||
|
if reads > allowed_reads:
|
||||||
|
self.fail('The remaining config was not ignored')
|
||||||
|
elif reads == allowed_reads:
|
||||||
|
return ['--ignore-config']
|
||||||
|
|
||||||
|
with ConfigMock(read_file):
|
||||||
|
parseOpts(args, False)
|
||||||
|
|
||||||
|
@set_environ()
|
||||||
|
def test_config_override_commandline(self):
|
||||||
|
self._override_test(0, '-o', 'pass')
|
||||||
|
|
||||||
|
@set_environ()
|
||||||
|
def test_config_override_files(self):
|
||||||
|
for index, _ in enumerate(make_expected(), 1):
|
||||||
|
self._override_test(index)
|
||||||
|
|
||||||
|
def _override_test(self, start_index, *args):
|
||||||
|
index = 0
|
||||||
|
|
||||||
|
def read_file(filename, default=[]):
|
||||||
|
nonlocal index
|
||||||
|
index += 1
|
||||||
|
|
||||||
|
if index > start_index:
|
||||||
|
return ['-o', 'fail']
|
||||||
|
elif index == start_index:
|
||||||
|
return ['-o', 'pass']
|
||||||
|
|
||||||
|
with ConfigMock(read_file):
|
||||||
|
_, opts, _ = parseOpts(args, False)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
opts.outtmpl['default'], 'pass',
|
||||||
|
'The earlier group did not override the later ones')
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def ConfigMock(read_file=None):
|
||||||
|
with unittest.mock.patch('yt_dlp.options.Config') as mock:
|
||||||
|
mock.return_value = Config(create_parser())
|
||||||
|
if read_file is not None:
|
||||||
|
mock.read_file = read_file
|
||||||
|
|
||||||
|
yield mock
|
||||||
|
|
||||||
|
|
||||||
|
def make_expected(*filepaths):
|
||||||
|
return expected_from_expected_groups(_generate_expected_groups(), *filepaths)
|
||||||
|
|
||||||
|
|
||||||
|
def make_expected_groups(*filepaths):
|
||||||
|
return _filter_expected_groups(_generate_expected_groups(), filepaths)
|
||||||
|
|
||||||
|
|
||||||
|
def expected_from_expected_groups(expected_groups, *filepaths):
|
||||||
|
return list(itertools.chain.from_iterable(
|
||||||
|
_filter_expected_groups(expected_groups, filepaths).values()))
|
||||||
|
|
||||||
|
|
||||||
|
def _filter_expected_groups(expected, filepaths):
|
||||||
|
if not filepaths:
|
||||||
|
return expected
|
||||||
|
|
||||||
|
result = {}
|
||||||
|
for group, paths in expected.items():
|
||||||
|
new_paths = []
|
||||||
|
for path in paths:
|
||||||
|
new_paths.append(path)
|
||||||
|
if path in filepaths:
|
||||||
|
break
|
||||||
|
|
||||||
|
result[group] = new_paths
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
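
The truncation rule that `_filter_expected_groups` encodes is the crux of these tests: within each group, the config search stops at the first file that exists, while the other groups are unaffected. The same logic, re-run standalone on toy string paths:

def filter_groups(expected, filepaths):
    # identical logic to _filter_expected_groups above, on plain strings
    if not filepaths:
        return expected
    result = {}
    for group, paths in expected.items():
        kept = []
        for path in paths:
            kept.append(path)
            if path in filepaths:
                break  # later candidates in this group are never checked
        result[group] = kept
    return result

groups = {'User': ['u1', 'u2', 'u3'], 'System': ['s1', 's2']}
assert filter_groups(groups, ['u2']) == {'User': ['u1', 'u2'], 'System': ['s1', 's2']}
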
--- a/test/test_cookies.py
+++ b/test/test_cookies.py
@@ -3,6 +3,7 @@ from datetime import datetime, timezone
 
 from yt_dlp import cookies
 from yt_dlp.cookies import (
+    LenientSimpleCookie,
     LinuxChromeCookieDecryptor,
     MacChromeCookieDecryptor,
     WindowsChromeCookieDecryptor,
@@ -48,32 +49,38 @@ class TestCookies(unittest.TestCase):
         """ based on https://chromium.googlesource.com/chromium/src/+/refs/heads/main/base/nix/xdg_util_unittest.cc """
         test_cases = [
             ({}, _LinuxDesktopEnvironment.OTHER),
+            ({'DESKTOP_SESSION': 'my_custom_de'}, _LinuxDesktopEnvironment.OTHER),
+            ({'XDG_CURRENT_DESKTOP': 'my_custom_de'}, _LinuxDesktopEnvironment.OTHER),
+
             ({'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME),
             ({'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME),
-            ({'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE),
-            ({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE),
+            ({'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
+            ({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
             ({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
 
             ({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
-            ({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE),
+            ({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
+            ({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
 
             ({'XDG_CURRENT_DESKTOP': 'X-Cinnamon'}, _LinuxDesktopEnvironment.CINNAMON),
+            ({'XDG_CURRENT_DESKTOP': 'Deepin'}, _LinuxDesktopEnvironment.DEEPIN),
             ({'XDG_CURRENT_DESKTOP': 'GNOME'}, _LinuxDesktopEnvironment.GNOME),
             ({'XDG_CURRENT_DESKTOP': 'GNOME:GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
             ({'XDG_CURRENT_DESKTOP': 'GNOME : GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
 
             ({'XDG_CURRENT_DESKTOP': 'Unity', 'DESKTOP_SESSION': 'gnome-fallback'}, _LinuxDesktopEnvironment.GNOME),
-            ({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '5'}, _LinuxDesktopEnvironment.KDE),
-            ({'XDG_CURRENT_DESKTOP': 'KDE'}, _LinuxDesktopEnvironment.KDE),
+            ({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '5'}, _LinuxDesktopEnvironment.KDE5),
+            ({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '6'}, _LinuxDesktopEnvironment.KDE6),
+            ({'XDG_CURRENT_DESKTOP': 'KDE'}, _LinuxDesktopEnvironment.KDE4),
             ({'XDG_CURRENT_DESKTOP': 'Pantheon'}, _LinuxDesktopEnvironment.PANTHEON),
+            ({'XDG_CURRENT_DESKTOP': 'UKUI'}, _LinuxDesktopEnvironment.UKUI),
             ({'XDG_CURRENT_DESKTOP': 'Unity'}, _LinuxDesktopEnvironment.UNITY),
             ({'XDG_CURRENT_DESKTOP': 'Unity:Unity7'}, _LinuxDesktopEnvironment.UNITY),
             ({'XDG_CURRENT_DESKTOP': 'Unity:Unity8'}, _LinuxDesktopEnvironment.UNITY),
         ]
 
         for env, expected_desktop_environment in test_cases:
-            self.assertEqual(_get_linux_desktop_environment(env), expected_desktop_environment)
+            self.assertEqual(_get_linux_desktop_environment(env, Logger()), expected_desktop_environment)
 
     def test_chrome_cookie_decryptor_linux_derive_key(self):
         key = LinuxChromeCookieDecryptor.derive_key(b'abc')
@@ -137,3 +144,163 @@ class TestCookies(unittest.TestCase):
     def test_pbkdf2_sha1(self):
         key = pbkdf2_sha1(b'peanuts', b' ' * 16, 1, 16)
         self.assertEqual(key, b'g\xe1\x8e\x0fQ\x1c\x9b\xf3\xc9`!\xaa\x90\xd9\xd34')
+
+
+class TestLenientSimpleCookie(unittest.TestCase):
+    def _run_tests(self, *cases):
+        for message, raw_cookie, expected in cases:
+            cookie = LenientSimpleCookie(raw_cookie)
+
+            with self.subTest(message, expected=expected):
+                self.assertEqual(cookie.keys(), expected.keys(), message)
+
+                for key, expected_value in expected.items():
+                    morsel = cookie[key]
+                    if isinstance(expected_value, tuple):
+                        expected_value, expected_attributes = expected_value
+                    else:
+                        expected_attributes = {}
+
+                    attributes = {
+                        key: value
+                        for key, value in dict(morsel).items()
+                        if value != ""
+                    }
+                    self.assertEqual(attributes, expected_attributes, message)
+
+                    self.assertEqual(morsel.value, expected_value, message)
+
+    def test_parsing(self):
+        self._run_tests(
+            # Copied from https://github.com/python/cpython/blob/v3.10.7/Lib/test/test_http_cookies.py
+            (
+                "Test basic cookie",
+                "chips=ahoy; vienna=finger",
+                {"chips": "ahoy", "vienna": "finger"},
+            ),
+            (
+                "Test quoted cookie",
+                'keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"',
+                {"keebler": 'E=mc2; L="Loves"; fudge=\012;'},
+            ),
+            (
+                "Allow '=' in an unquoted value",
+                "keebler=E=mc2",
+                {"keebler": "E=mc2"},
+            ),
+            (
+                "Allow cookies with ':' in their name",
+                "key:term=value:term",
+                {"key:term": "value:term"},
+            ),
+            (
+                "Allow '[' and ']' in cookie values",
+                "a=b; c=[; d=r; f=h",
+                {"a": "b", "c": "[", "d": "r", "f": "h"},
+            ),
+            (
+                "Test basic cookie attributes",
+                'Customer="WILE_E_COYOTE"; Version=1; Path=/acme',
+                {"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})},
+            ),
+            (
+                "Test flag only cookie attributes",
+                'Customer="WILE_E_COYOTE"; HttpOnly; Secure',
+                {"Customer": ("WILE_E_COYOTE", {"httponly": True, "secure": True})},
+            ),
+            (
+                "Test flag only attribute with values",
+                "eggs=scrambled; httponly=foo; secure=bar; Path=/bacon",
+                {"eggs": ("scrambled", {"httponly": "foo", "secure": "bar", "path": "/bacon"})},
+            ),
+            (
+                "Test special case for 'expires' attribute, 4 digit year",
+                'Customer="W"; expires=Wed, 01 Jan 2010 00:00:00 GMT',
+                {"Customer": ("W", {"expires": "Wed, 01 Jan 2010 00:00:00 GMT"})},
+            ),
+            (
+                "Test special case for 'expires' attribute, 2 digit year",
+                'Customer="W"; expires=Wed, 01 Jan 98 00:00:00 GMT',
+                {"Customer": ("W", {"expires": "Wed, 01 Jan 98 00:00:00 GMT"})},
+            ),
+            (
+                "Test extra spaces in keys and values",
+                "eggs  =  scrambled  ;  secure  ;  path  =  bar   ; foo=foo   ",
+                {"eggs": ("scrambled", {"secure": True, "path": "bar"}), "foo": "foo"},
+            ),
+            (
+                "Test quoted attributes",
+                'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"',
+                {"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})}
+            ),
+            # Our own tests that CPython passes
+            (
+                "Allow ';' in quoted value",
+                'chips="a;hoy"; vienna=finger',
+                {"chips": "a;hoy", "vienna": "finger"},
+            ),
+            (
+                "Keep only the last set value",
+                "a=c; a=b",
+                {"a": "b"},
+            ),
+        )
+
+    def test_lenient_parsing(self):
+        self._run_tests(
+            (
+                "Ignore and try to skip invalid cookies",
+                'chips={"ahoy;": 1}; vienna="finger;"',
+                {"vienna": "finger;"},
+            ),
+            (
+                "Ignore cookies without a name",
+                "a=b; unnamed; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Ignore '\"' cookie without name",
+                'a=b; "; c=d',
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Skip all space separated values",
+                "x a=b c=d x; e=f",
+                {"a": "b", "c": "d", "e": "f"},
+            ),
+            (
+                "Skip all space separated values",
+                'x a=b; data={"complex": "json", "with": "key=value"}; x c=d x',
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Expect quote mending",
+                'a=b; invalid="; c=d',
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Reset morsel after invalid to not capture attributes",
+                "a=b; invalid; Version=1; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Reset morsel after invalid to not capture attributes",
+                "a=b; $invalid; $Version=1; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Continue after non-flag attribute without value",
+                "a=b; path; Version=1; c=d",
+                {"a": "b", "c": "d"},
+            ),
+            (
+                "Allow cookie attributes with `$` prefix",
+                'Customer="WILE_E_COYOTE"; $Version=1; $Secure; $Path=/acme',
+                {"Customer": ("WILE_E_COYOTE", {"version": "1", "secure": True, "path": "/acme"})},
+            ),
+            (
+                "Invalid Morsel keys should not result in an error",
+                "Key=Value; [Invalid]=Value; Another=Value",
+                {"Key": "Value", "Another": "Value"},
+            ),
+        )
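
The new `TestLenientSimpleCookie` cases are easiest to read against one concrete run. The stdlib's `SimpleCookie` typically rejects a whole header once it hits a malformed entry; the lenient subclass skips the bad morsel and keeps parsing, which is exactly what the "Ignore cookies without a name" case above pins down:

from yt_dlp.cookies import LenientSimpleCookie

cookie = LenientSimpleCookie('a=b; unnamed; c=d')
assert {name: morsel.value for name, morsel in cookie.items()} == {'a': 'b', 'c': 'd'}
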
@@ -8,11 +8,9 @@ import unittest
|
|||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import collections
|
||||||
import hashlib
|
import hashlib
|
||||||
import http.client
|
|
||||||
import json
|
import json
|
||||||
import socket
|
|
||||||
import urllib.error
|
|
||||||
|
|
||||||
from test.helper import (
|
from test.helper import (
|
||||||
assertGreaterEqual,
|
assertGreaterEqual,
|
||||||
@@ -20,6 +18,7 @@ from test.helper import (
|
|||||||
expect_warnings,
|
expect_warnings,
|
||||||
get_params,
|
get_params,
|
||||||
gettestcases,
|
gettestcases,
|
||||||
|
getwebpagetestcases,
|
||||||
is_download_test,
|
is_download_test,
|
||||||
report_warning,
|
report_warning,
|
||||||
try_rm,
|
try_rm,
|
||||||
@@ -27,11 +26,14 @@ from test.helper import (
|
|||||||
|
|
||||||
import yt_dlp.YoutubeDL # isort: split
|
import yt_dlp.YoutubeDL # isort: split
|
||||||
from yt_dlp.extractor import get_info_extractor
|
from yt_dlp.extractor import get_info_extractor
|
||||||
|
from yt_dlp.networking.exceptions import HTTPError, TransportError
|
||||||
from yt_dlp.utils import (
|
from yt_dlp.utils import (
|
||||||
DownloadError,
|
DownloadError,
|
||||||
ExtractorError,
|
ExtractorError,
|
||||||
UnavailableVideoError,
|
UnavailableVideoError,
|
||||||
|
YoutubeDLError,
|
||||||
format_bytes,
|
format_bytes,
|
||||||
|
join_nonempty,
|
||||||
)
|
)
|
||||||
|
|
||||||
RETRIES = 3
|
RETRIES = 3
|
||||||
@@ -57,7 +59,9 @@ def _file_md5(fn):
|
|||||||
return hashlib.md5(f.read()).hexdigest()
|
return hashlib.md5(f.read()).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
defs = gettestcases()
|
normal_test_cases = gettestcases()
|
||||||
|
webpage_test_cases = getwebpagetestcases()
|
||||||
|
tests_counter = collections.defaultdict(collections.Counter)
|
||||||
|
|
||||||
|
|
||||||
@is_download_test
|
@is_download_test
|
||||||
@@ -72,24 +76,13 @@ class TestDownload(unittest.TestCase):
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
"""Identify each test with the `add_ie` attribute, if available."""
|
"""Identify each test with the `add_ie` attribute, if available."""
|
||||||
|
cls, add_ie = type(self), getattr(self, self._testMethodName).add_ie
|
||||||
|
return f'{self._testMethodName} ({cls.__module__}.{cls.__name__}){f" [{add_ie}]" if add_ie else ""}:'
|
||||||
|
|
||||||
def strclass(cls):
|
|
||||||
"""From 2.7's unittest; 2.6 had _strclass so we can't import it."""
|
|
||||||
return f'{cls.__module__}.{cls.__name__}'
|
|
||||||
|
|
||||||
add_ie = getattr(self, self._testMethodName).add_ie
|
|
||||||
return '%s (%s)%s:' % (self._testMethodName,
|
|
||||||
strclass(self.__class__),
|
|
||||||
' [%s]' % add_ie if add_ie else '')
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.defs = defs
|
|
||||||
|
|
||||||
# Dynamically generate tests
|
# Dynamically generate tests
|
||||||
|
|
||||||
|
|
||||||
def generator(test_case, tname):
|
def generator(test_case, tname):
|
||||||
|
|
||||||
def test_template(self):
|
def test_template(self):
|
||||||
if self.COMPLETED_TESTS.get(tname):
|
if self.COMPLETED_TESTS.get(tname):
|
||||||
return
|
return
|
||||||
@@ -108,14 +101,16 @@ def generator(test_case, tname):
|
|||||||
print_skipping('IE marked as not _WORKING')
|
print_skipping('IE marked as not _WORKING')
|
||||||
|
|
||||||
for tc in test_cases:
|
for tc in test_cases:
|
||||||
|
if tc.get('expected_exception'):
|
||||||
|
continue
|
||||||
info_dict = tc.get('info_dict', {})
|
info_dict = tc.get('info_dict', {})
|
||||||
params = tc.get('params', {})
|
params = tc.get('params', {})
|
||||||
if not info_dict.get('id'):
|
if not info_dict.get('id'):
|
||||||
raise Exception('Test definition incorrect. \'id\' key is not present')
|
raise Exception(f'Test {tname} definition incorrect - "id" key is not present')
|
||||||
elif not info_dict.get('ext'):
|
elif not info_dict.get('ext') and info_dict.get('_type', 'video') == 'video':
|
||||||
if params.get('skip_download') and params.get('ignore_no_formats_error'):
|
if params.get('skip_download') and params.get('ignore_no_formats_error'):
|
||||||
continue
|
continue
|
||||||
raise Exception('Test definition incorrect. The output file cannot be known. \'ext\' key is not present')
|
raise Exception(f'Test {tname} definition incorrect - "ext" key must be present to define the output file')
|
||||||
|
|
||||||
if 'skip' in test_case:
|
if 'skip' in test_case:
|
||||||
print_skipping(test_case['skip'])
|
print_skipping(test_case['skip'])
|
||||||
@@ -128,7 +123,8 @@ def generator(test_case, tname):
|
|||||||
params['outtmpl'] = tname + '_' + params['outtmpl']
|
params['outtmpl'] = tname + '_' + params['outtmpl']
|
||||||
if is_playlist and 'playlist' not in test_case:
|
if is_playlist and 'playlist' not in test_case:
|
||||||
params.setdefault('extract_flat', 'in_playlist')
|
params.setdefault('extract_flat', 'in_playlist')
|
||||||
params.setdefault('playlistend', test_case.get('playlist_mincount'))
|
params.setdefault('playlistend', test_case.get(
|
||||||
|
'playlist_mincount', test_case.get('playlist_count', -2) + 1))
|
||||||
params.setdefault('skip_download', True)
|
params.setdefault('skip_download', True)
|
||||||
|
|
||||||
ydl = YoutubeDL(params, auto_init=False)
|
ydl = YoutubeDL(params, auto_init=False)
|
||||||
@@ -146,6 +142,17 @@ def generator(test_case, tname):
|
|||||||
|
|
||||||
res_dict = None
|
res_dict = None
|
||||||
|
|
||||||
|
def match_exception(err):
|
||||||
|
expected_exception = test_case.get('expected_exception')
|
||||||
|
if not expected_exception:
|
||||||
|
return False
|
||||||
|
if err.__class__.__name__ == expected_exception:
|
||||||
|
return True
|
||||||
|
for exc in err.exc_info:
|
||||||
|
if exc.__class__.__name__ == expected_exception:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
def try_rm_tcs_files(tcs=None):
|
def try_rm_tcs_files(tcs=None):
|
||||||
if tcs is None:
|
if tcs is None:
|
||||||
tcs = test_cases
|
tcs = test_cases
|
||||||
@@ -167,7 +174,10 @@ def generator(test_case, tname):
|
|||||||
force_generic_extractor=params.get('force_generic_extractor', False))
|
force_generic_extractor=params.get('force_generic_extractor', False))
|
||||||
except (DownloadError, ExtractorError) as err:
|
except (DownloadError, ExtractorError) as err:
|
||||||
# Check if the exception is not a network related one
|
# Check if the exception is not a network related one
|
||||||
if not err.exc_info[0] in (urllib.error.URLError, socket.timeout, UnavailableVideoError, http.client.BadStatusLine) or (err.exc_info[0] == urllib.error.HTTPError and err.exc_info[1].code == 503):
|
if not isinstance(err.exc_info[1], (TransportError, UnavailableVideoError)) or (isinstance(err.exc_info[1], HTTPError) and err.exc_info[1].status == 503):
|
||||||
|
if match_exception(err):
|
||||||
|
return
|
||||||
|
err.msg = f'{getattr(err, "msg", err)} ({tname})'
|
||||||
raise
|
raise
|
||||||
|
|
||||||
if try_num == RETRIES:
|
if try_num == RETRIES:
|
||||||
@@ -177,6 +187,10 @@ def generator(test_case, tname):
|
|||||||
print(f'Retrying: {try_num} failed tries\n\n##########\n\n')
|
print(f'Retrying: {try_num} failed tries\n\n##########\n\n')
|
||||||
|
|
||||||
try_num += 1
|
try_num += 1
|
||||||
|
except YoutubeDLError as err:
|
||||||
|
if match_exception(err):
|
||||||
|
return
|
||||||
|
raise
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -216,6 +230,8 @@ def generator(test_case, tname):
|
|||||||
tc_res_dict = res_dict['entries'][tc_num]
|
tc_res_dict = res_dict['entries'][tc_num]
|
||||||
# First, check test cases' data against extracted data alone
|
# First, check test cases' data against extracted data alone
|
||||||
expect_info_dict(self, tc_res_dict, tc.get('info_dict', {}))
|
expect_info_dict(self, tc_res_dict, tc.get('info_dict', {}))
|
||||||
|
if tc_res_dict.get('_type', 'video') != 'video':
|
||||||
|
continue
|
||||||
# Now, check downloaded file consistency
|
# Now, check downloaded file consistency
|
||||||
tc_filename = get_tc_filename(tc)
|
tc_filename = get_tc_filename(tc)
|
||||||
if not test_case.get('params', {}).get('skip_download', False):
|
if not test_case.get('params', {}).get('skip_download', False):
|
||||||
@@ -250,44 +266,48 @@ def generator(test_case, tname):
|
|||||||
# extractor returns full results even with extract_flat
|
# extractor returns full results even with extract_flat
|
||||||
res_tcs = [{'info_dict': e} for e in res_dict['entries']]
|
res_tcs = [{'info_dict': e} for e in res_dict['entries']]
|
||||||
try_rm_tcs_files(res_tcs)
|
try_rm_tcs_files(res_tcs)
|
||||||
|
ydl.close()
|
||||||
return test_template
|
return test_template
|
||||||
|
|
||||||
|
|
||||||
# And add them to TestDownload
|
# And add them to TestDownload
|
||||||
tests_counter = {}
|
def inject_tests(test_cases, label=''):
|
||||||
for test_case in defs:
|
for test_case in test_cases:
|
||||||
name = test_case['name']
|
name = test_case['name']
|
||||||
i = tests_counter.get(name, 0)
|
tname = join_nonempty('test', name, label, tests_counter[name][label], delim='_')
|
||||||
tests_counter[name] = i + 1
|
tests_counter[name][label] += 1
|
||||||
tname = f'test_{name}_{i}' if i else f'test_{name}'
|
|
||||||
test_method = generator(test_case, tname)
|
test_method = generator(test_case, tname)
|
||||||
test_method.__name__ = str(tname)
|
test_method.__name__ = tname
|
||||||
ie_list = test_case.get('add_ie')
|
test_method.add_ie = ','.join(test_case.get('add_ie', []))
|
||||||
test_method.add_ie = ie_list and ','.join(ie_list)
|
setattr(TestDownload, test_method.__name__, test_method)
|
||||||
setattr(TestDownload, test_method.__name__, test_method)
|
|
||||||
del test_method
|
|
||||||
|
|
||||||
|
|
||||||
def batch_generator(name, num_tests):
|
inject_tests(normal_test_cases)
|
||||||
|
|
||||||
|
# TODO: disable redirection to the IE to ensure we are actually testing the webpage extraction
|
||||||
|
inject_tests(webpage_test_cases, 'webpage')
|
||||||
|
|
||||||
|
|
||||||
|
def batch_generator(name):
|
||||||
def test_template(self):
|
def test_template(self):
|
||||||
for i in range(num_tests):
|
for label, num_tests in tests_counter[name].items():
|
||||||
test_name = f'test_{name}_{i}' if i else f'test_{name}'
|
for i in range(num_tests):
|
||||||
try:
|
test_name = join_nonempty('test', name, label, i, delim='_')
|
||||||
getattr(self, test_name)()
|
try:
|
||||||
except unittest.SkipTest:
|
getattr(self, test_name)()
|
||||||
print(f'Skipped {test_name}')
|
except unittest.SkipTest:
|
||||||
|
print(f'Skipped {test_name}')
|
||||||
|
|
||||||
return test_template
|
return test_template
|
||||||
|
|
||||||
|
|
||||||
for name, num_tests in tests_counter.items():
|
for name in tests_counter:
|
||||||
test_method = batch_generator(name, num_tests)
|
test_method = batch_generator(name)
|
||||||
test_method.__name__ = f'test_{name}_all'
|
test_method.__name__ = f'test_{name}_all'
|
||||||
test_method.add_ie = ''
|
test_method.add_ie = ''
|
||||||
setattr(TestDownload, test_method.__name__, test_method)
|
setattr(TestDownload, test_method.__name__, test_method)
|
||||||
del test_method
|
del test_method
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|||||||
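
The renaming scheme in `inject_tests` depends on `join_nonempty` dropping falsy components: the first test for an extractor keeps the bare name (counter 0 vanishes), duplicates get numeric suffixes, and webpage variants get a label. Checked standalone (the extractor name 'Youtube' is just an example):

from yt_dlp.utils import join_nonempty

assert join_nonempty('test', 'Youtube', '', 0, delim='_') == 'test_Youtube'
assert join_nonempty('test', 'Youtube', '', 1, delim='_') == 'test_Youtube_1'
assert join_nonempty('test', 'Youtube', 'webpage', 0, delim='_') == 'test_Youtube_webpage'
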
test/test_downloader_external.py (new file, 139 lines)
@@ -0,0 +1,139 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import http.cookiejar

from test.helper import FakeYDL
from yt_dlp.downloader.external import (
    Aria2cFD,
    AxelFD,
    CurlFD,
    FFmpegFD,
    HttpieFD,
    WgetFD,
)

TEST_COOKIE = {
    'version': 0,
    'name': 'test',
    'value': 'ytdlp',
    'port': None,
    'port_specified': False,
    'domain': '.example.com',
    'domain_specified': True,
    'domain_initial_dot': False,
    'path': '/',
    'path_specified': True,
    'secure': False,
    'expires': None,
    'discard': False,
    'comment': None,
    'comment_url': None,
    'rest': {},
}

TEST_INFO = {'url': 'http://www.example.com/'}


class TestHttpieFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = HttpieFD(ydl, {})
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['http', '--download', '--output', 'test', 'http://www.example.com/'])

            # Test cookie header is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['http', '--download', '--output', 'test', 'http://www.example.com/', 'Cookie:test=ytdlp'])


class TestAxelFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = AxelFD(ydl, {})
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '--', 'http://www.example.com/'])

            # Test cookie header is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            self.assertEqual(
                downloader._make_cmd('test', TEST_INFO),
                ['axel', '-o', 'test', '-H', 'Cookie: test=ytdlp', '--max-redirect=0', '--', 'http://www.example.com/'])


class TestWgetFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = WgetFD(ydl, {})
            self.assertNotIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))
            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            self.assertIn('--load-cookies', downloader._make_cmd('test', TEST_INFO))


class TestCurlFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = CurlFD(ydl, {})
            self.assertNotIn('--cookie', downloader._make_cmd('test', TEST_INFO))
            # Test cookie header is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            self.assertIn('--cookie', downloader._make_cmd('test', TEST_INFO))
            self.assertIn('test=ytdlp', downloader._make_cmd('test', TEST_INFO))


class TestAria2cFD(unittest.TestCase):
    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = Aria2cFD(ydl, {})
            downloader._make_cmd('test', TEST_INFO)
            self.assertFalse(hasattr(downloader, '_cookies_tempfile'))

            # Test cookiejar tempfile arg is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            cmd = downloader._make_cmd('test', TEST_INFO)
            self.assertIn(f'--load-cookies={downloader._cookies_tempfile}', cmd)


@unittest.skipUnless(FFmpegFD.available(), 'ffmpeg not found')
class TestFFmpegFD(unittest.TestCase):
    _args = []

    def _test_cmd(self, args):
        self._args = args

    def test_make_cmd(self):
        with FakeYDL() as ydl:
            downloader = FFmpegFD(ydl, {})
            downloader._debug_cmd = self._test_cmd

            downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-hide_banner', '-i', 'http://www.example.com/',
                '-c', 'copy', '-f', 'mp4', 'file:test'])

            # Test cookies arg is added
            ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
            downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-hide_banner', '-cookies', 'test=ytdlp; path=/; domain=.example.com;\r\n',
                '-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])

            # Test with non-url input (ffmpeg reads from stdin '-' for websockets)
            downloader._call_downloader('test', {'url': 'x', 'ext': 'mp4'})
            self.assertEqual(self._args, [
                'ffmpeg', '-y', '-hide_banner', '-i', 'x', '-c', 'copy', '-f', 'mp4', 'file:test'])


if __name__ == '__main__':
    unittest.main()
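
`TEST_COOKIE` spells out every field that `http.cookiejar.Cookie`'s constructor requires, and the tests splat it with `**` to plant one deterministic cookie in the jar. The same construction, self-contained:

import http.cookiejar

jar = http.cookiejar.CookieJar()
jar.set_cookie(http.cookiejar.Cookie(
    version=0, name='test', value='ytdlp', port=None, port_specified=False,
    domain='.example.com', domain_specified=True, domain_initial_dot=False,
    path='/', path_specified=True, secure=False, expires=None, discard=False,
    comment=None, comment_url=None, rest={}))
assert any(c.name == 'test' and c.value == 'ytdlp' for c in jar)
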
--- a/test/test_downloader_http.py
+++ b/test/test_downloader_http.py
@@ -16,6 +16,7 @@ from test.helper import http_server_port, try_rm
 from yt_dlp import YoutubeDL
 from yt_dlp.downloader.http import HttpFD
 from yt_dlp.utils import encodeFilename
+from yt_dlp.utils._utils import _YDLLogger as FakeLogger
 
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -67,17 +68,6 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
         assert False
 
 
-class FakeLogger:
-    def debug(self, msg):
-        pass
-
-    def warning(self, msg):
-        pass
-
-    def error(self, msg):
-        pass
-
-
 class TestHttpFD(unittest.TestCase):
     def setUp(self):
         self.httpd = http.server.HTTPServer(
@@ -95,8 +85,8 @@ class TestHttpFD(unittest.TestCase):
         try_rm(encodeFilename(filename))
         self.assertTrue(downloader.real_download(filename, {
             'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
-        }))
-        self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
+        }), ep)
+        self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
         try_rm(encodeFilename(filename))
 
     def download_all(self, params):
--- a/test/test_execution.py
+++ b/test/test_execution.py
@@ -11,41 +11,49 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import contextlib
 import subprocess
 
-from yt_dlp.utils import encodeArgument
+from yt_dlp.utils import Popen
 
 rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-
-try:
-    _DEV_NULL = subprocess.DEVNULL
-except AttributeError:
-    _DEV_NULL = open(os.devnull, 'wb')
+LAZY_EXTRACTORS = 'yt_dlp/extractor/lazy_extractors.py'
 
 
 class TestExecution(unittest.TestCase):
-    def test_import(self):
-        subprocess.check_call([sys.executable, '-c', 'import yt_dlp'], cwd=rootDir)
-
-    def test_module_exec(self):
-        subprocess.check_call([sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
-
-    def test_main_exec(self):
-        subprocess.check_call([sys.executable, 'yt_dlp/__main__.py', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
+    def run_yt_dlp(self, exe=(sys.executable, 'yt_dlp/__main__.py'), opts=('--version', )):
+        stdout, stderr, returncode = Popen.run(
+            [*exe, '--ignore-config', *opts], cwd=rootDir, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        print(stderr, file=sys.stderr)
+        self.assertEqual(returncode, 0)
+        return stdout.strip(), stderr.strip()
+
+    def test_main_exec(self):
+        self.run_yt_dlp()
+
+    def test_import(self):
+        self.run_yt_dlp(exe=(sys.executable, '-c', 'import yt_dlp'))
+
+    def test_module_exec(self):
+        self.run_yt_dlp(exe=(sys.executable, '-m', 'yt_dlp'))
 
     def test_cmdline_umlauts(self):
-        p = subprocess.Popen(
-            [sys.executable, 'yt_dlp/__main__.py', '--ignore-config', encodeArgument('ä'), '--version'],
-            cwd=rootDir, stdout=_DEV_NULL, stderr=subprocess.PIPE)
-        _, stderr = p.communicate()
+        _, stderr = self.run_yt_dlp(opts=('ä', '--version'))
         self.assertFalse(stderr)
 
     def test_lazy_extractors(self):
         try:
-            subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'], cwd=rootDir, stdout=_DEV_NULL)
-            subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=_DEV_NULL)
+            subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', LAZY_EXTRACTORS],
+                                  cwd=rootDir, stdout=subprocess.DEVNULL)
+            self.assertTrue(os.path.exists(LAZY_EXTRACTORS))
+
+            _, stderr = self.run_yt_dlp(opts=('-s', 'test:'))
+            # `MIN_RECOMMENDED` emits a deprecated feature warning for deprecated python versions
+            if stderr and stderr.startswith('Deprecated Feature: Support for Python'):
+                stderr = ''
+            self.assertFalse(stderr)
+
+            subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=subprocess.DEVNULL)
         finally:
             with contextlib.suppress(OSError):
-                os.remove('yt_dlp/extractor/lazy_extractors.py')
+                os.remove(LAZY_EXTRACTORS)
 
 
 if __name__ == '__main__':
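
`run_yt_dlp` above funnels every CLI invocation through yt-dlp's `Popen.run`, which returns `(stdout, stderr, returncode)`. A rough stdlib-only equivalent of the same pattern (illustrative, not the project's helper):

import subprocess
import sys

def run_checked(argv, cwd='.'):
    proc = subprocess.run(argv, cwd=cwd, text=True, capture_output=True)
    print(proc.stderr, file=sys.stderr)
    assert proc.returncode == 0, f'{argv} exited with {proc.returncode}'
    return proc.stdout.strip(), proc.stderr.strip()

# out, err = run_checked([sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'])
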
test/test_http.py (file deleted, 192 lines)
@@ -1,192 +0,0 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import http.server
import ssl
import threading
import urllib.request

from test.helper import http_server_port
from yt_dlp import YoutubeDL

TEST_DIR = os.path.dirname(os.path.abspath(__file__))


class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
    def log_message(self, format, *args):
        pass

    def do_GET(self):
        if self.path == '/video.html':
            self.send_response(200)
            self.send_header('Content-Type', 'text/html; charset=utf-8')
            self.end_headers()
            self.wfile.write(b'<html><video src="/vid.mp4" /></html>')
        elif self.path == '/vid.mp4':
            self.send_response(200)
            self.send_header('Content-Type', 'video/mp4')
            self.end_headers()
            self.wfile.write(b'\x00\x00\x00\x00\x20\x66\x74[video]')
        elif self.path == '/%E4%B8%AD%E6%96%87.html':
            self.send_response(200)
            self.send_header('Content-Type', 'text/html; charset=utf-8')
            self.end_headers()
            self.wfile.write(b'<html><video src="/vid.mp4" /></html>')
        else:
            assert False


class FakeLogger:
    def debug(self, msg):
        pass

    def warning(self, msg):
        pass

    def error(self, msg):
        pass


class TestHTTP(unittest.TestCase):
    def setUp(self):
        self.httpd = http.server.HTTPServer(
            ('127.0.0.1', 0), HTTPTestRequestHandler)
        self.port = http_server_port(self.httpd)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()


class TestHTTPS(unittest.TestCase):
    def setUp(self):
        certfn = os.path.join(TEST_DIR, 'testcert.pem')
        self.httpd = http.server.HTTPServer(
            ('127.0.0.1', 0), HTTPTestRequestHandler)
        sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        sslctx.load_cert_chain(certfn, None)
        self.httpd.socket = sslctx.wrap_socket(self.httpd.socket, server_side=True)
        self.port = http_server_port(self.httpd)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()

    def test_nocheckcertificate(self):
        ydl = YoutubeDL({'logger': FakeLogger()})
        self.assertRaises(
            Exception,
            ydl.extract_info, 'https://127.0.0.1:%d/video.html' % self.port)

        ydl = YoutubeDL({'logger': FakeLogger(), 'nocheckcertificate': True})
        r = ydl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
        self.assertEqual(r['entries'][0]['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)


class TestClientCert(unittest.TestCase):
    def setUp(self):
        certfn = os.path.join(TEST_DIR, 'testcert.pem')
        self.certdir = os.path.join(TEST_DIR, 'testdata', 'certificate')
        cacertfn = os.path.join(self.certdir, 'ca.crt')
        self.httpd = http.server.HTTPServer(('127.0.0.1', 0), HTTPTestRequestHandler)
        sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        sslctx.verify_mode = ssl.CERT_REQUIRED
        sslctx.load_verify_locations(cafile=cacertfn)
        sslctx.load_cert_chain(certfn, None)
        self.httpd.socket = sslctx.wrap_socket(self.httpd.socket, server_side=True)
        self.port = http_server_port(self.httpd)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        self.server_thread.daemon = True
        self.server_thread.start()

    def _run_test(self, **params):
        ydl = YoutubeDL({
            'logger': FakeLogger(),
            # Disable client-side validation of unacceptable self-signed testcert.pem
            # The test is of a check on the server side, so unaffected
            'nocheckcertificate': True,
            **params,
        })
        r = ydl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
        self.assertEqual(r['entries'][0]['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)

    def test_certificate_combined_nopass(self):
        self._run_test(client_certificate=os.path.join(self.certdir, 'clientwithkey.crt'))

    def test_certificate_nocombined_nopass(self):
        self._run_test(client_certificate=os.path.join(self.certdir, 'client.crt'),
                       client_certificate_key=os.path.join(self.certdir, 'client.key'))

    def test_certificate_combined_pass(self):
        self._run_test(client_certificate=os.path.join(self.certdir, 'clientwithencryptedkey.crt'),
                       client_certificate_password='foobar')

    def test_certificate_nocombined_pass(self):
        self._run_test(client_certificate=os.path.join(self.certdir, 'client.crt'),
                       client_certificate_key=os.path.join(self.certdir, 'clientencrypted.key'),
                       client_certificate_password='foobar')


def _build_proxy_handler(name):
    class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
        proxy_name = name

        def log_message(self, format, *args):
            pass

        def do_GET(self):
            self.send_response(200)
            self.send_header('Content-Type', 'text/plain; charset=utf-8')
            self.end_headers()
            self.wfile.write(f'{self.proxy_name}: {self.path}'.encode())
    return HTTPTestRequestHandler


class TestProxy(unittest.TestCase):
    def setUp(self):
        self.proxy = http.server.HTTPServer(
            ('127.0.0.1', 0), _build_proxy_handler('normal'))
        self.port = http_server_port(self.proxy)
        self.proxy_thread = threading.Thread(target=self.proxy.serve_forever)
        self.proxy_thread.daemon = True
        self.proxy_thread.start()

        self.geo_proxy = http.server.HTTPServer(
            ('127.0.0.1', 0), _build_proxy_handler('geo'))
        self.geo_port = http_server_port(self.geo_proxy)
        self.geo_proxy_thread = threading.Thread(target=self.geo_proxy.serve_forever)
        self.geo_proxy_thread.daemon = True
        self.geo_proxy_thread.start()

    def test_proxy(self):
        geo_proxy = f'127.0.0.1:{self.geo_port}'
        ydl = YoutubeDL({
            'proxy': f'127.0.0.1:{self.port}',
            'geo_verification_proxy': geo_proxy,
        })
        url = 'http://foo.com/bar'
        response = ydl.urlopen(url).read().decode()
        self.assertEqual(response, f'normal: {url}')

        req = urllib.request.Request(url)
        req.add_header('Ytdl-request-proxy', geo_proxy)
        response = ydl.urlopen(req).read().decode()
        self.assertEqual(response, f'geo: {url}')

    def test_proxy_with_idn(self):
        ydl = YoutubeDL({
            'proxy': f'127.0.0.1:{self.port}',
        })
        url = 'http://中文.tw/'
        response = ydl.urlopen(url).read().decode()
        # b'xn--fiq228c' is '中文'.encode('idna')
        self.assertEqual(response, 'normal: http://xn--fiq228c.tw/')


if __name__ == '__main__':
    unittest.main()
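
The final diff in this section rewrites test_jsinterp.py around a small `_test` helper. For context before reading it: `JSInterpreter` compiles a JavaScript snippet and `call_function` invokes a named function from it; NaN results need `math.isnan`, since NaN compares unequal even to itself (the values below are the ones the diff's test_div pins down):

import math

from yt_dlp.jsinterp import JSInterpreter

jsi = JSInterpreter('function f(a, b){return a / b;}')
assert jsi.call_function('f', 0, 3) == 0
assert math.isnan(jsi.call_function('f', 0, 0))  # NaN != NaN, so isnan is required
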
--- a/test/test_jsinterp.py
+++ b/test/test_jsinterp.py
@@ -7,175 +7,373 @@ import unittest
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
+import math
 
-from yt_dlp.jsinterp import JSInterpreter
+from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
+
+
+class NaN:
+    pass
 
 
 class TestJSInterpreter(unittest.TestCase):
+    def _test(self, jsi_or_code, expected, func='f', args=()):
+        if isinstance(jsi_or_code, str):
+            jsi_or_code = JSInterpreter(jsi_or_code)
+        got = jsi_or_code.call_function(func, *args)
+        if expected is NaN:
+            self.assertTrue(math.isnan(got), f'{got} is not NaN')
+        else:
+            self.assertEqual(got, expected)
+
     def test_basic(self):
-        jsi = JSInterpreter('function x(){;}')
-        self.assertEqual(jsi.call_function('x'), None)
+        jsi = JSInterpreter('function f(){;}')
+        self.assertEqual(repr(jsi.extract_function('f')), 'F<f>')
+        self._test(jsi, None)
 
-        jsi = JSInterpreter('function x3(){return 42;}')
-        self.assertEqual(jsi.call_function('x3'), 42)
+        self._test('function f(){return 42;}', 42)
+        self._test('function f(){42}', None)
+        self._test('var f = function(){return 42;}', 42)
 
-        jsi = JSInterpreter('var x5 = function(){return 42;}')
-        self.assertEqual(jsi.call_function('x5'), 42)
+    def test_add(self):
+        self._test('function f(){return 42 + 7;}', 49)
+        self._test('function f(){return 42 + undefined;}', NaN)
+        self._test('function f(){return 42 + null;}', 42)
+
+    def test_sub(self):
+        self._test('function f(){return 42 - 7;}', 35)
+        self._test('function f(){return 42 - undefined;}', NaN)
+        self._test('function f(){return 42 - null;}', 42)
+
+    def test_mul(self):
+        self._test('function f(){return 42 * 7;}', 294)
+        self._test('function f(){return 42 * undefined;}', NaN)
+        self._test('function f(){return 42 * null;}', 0)
+
+    def test_div(self):
+        jsi = JSInterpreter('function f(a, b){return a / b;}')
+        self._test(jsi, NaN, args=(0, 0))
+        self._test(jsi, NaN, args=(JS_Undefined, 1))
+        self._test(jsi, float('inf'), args=(2, 0))
+        self._test(jsi, 0, args=(0, 3))
+
+    def test_mod(self):
+        self._test('function f(){return 42 % 7;}', 0)
+        self._test('function f(){return 42 % 0;}', NaN)
+        self._test('function f(){return 42 % undefined;}', NaN)
+
+    def test_exp(self):
+        self._test('function f(){return 42 ** 2;}', 1764)
+        self._test('function f(){return 42 ** undefined;}', NaN)
+        self._test('function f(){return 42 ** null;}', 1)
+        self._test('function f(){return undefined ** 42;}', NaN)
 
     def test_calc(self):
-        jsi = JSInterpreter('function x4(a){return 2*a+1;}')
-        self.assertEqual(jsi.call_function('x4', 3), 7)
+        self._test('function f(a){return 2*a+1;}', 7, args=[3])
 
     def test_empty_return(self):
-        jsi = JSInterpreter('function f(){return; y()}')
-        self.assertEqual(jsi.call_function('f'), None)
+        self._test('function f(){return; y()}', None)
 
     def test_morespace(self):
-        jsi = JSInterpreter('function x (a) { return 2 * a + 1 ; }')
-        self.assertEqual(jsi.call_function('x', 3), 7)
-
-        jsi = JSInterpreter('function f () { x =  2  ; return x; }')
-        self.assertEqual(jsi.call_function('f'), 2)
+        self._test('function f (a) { return 2 * a + 1 ; }', 7, args=[3])
+        self._test('function f () { x =  2  ; return x; }', 2)
 
     def test_strange_chars(self):
-        jsi = JSInterpreter('function $_xY1 ($_axY1) { var $_axY2 = $_axY1 + 1; return $_axY2; }')
-        self.assertEqual(jsi.call_function('$_xY1', 20), 21)
+        self._test('function $_xY1 ($_axY1) { var $_axY2 = $_axY1 + 1; return $_axY2; }',
+                   21, args=[20], func='$_xY1')
 
     def test_operators(self):
-        jsi = JSInterpreter('function f(){return 1 << 5;}')
-        self.assertEqual(jsi.call_function('f'), 32)
-
-        jsi = JSInterpreter('function f(){return 19 & 21;}')
-        self.assertEqual(jsi.call_function('f'), 17)
-
-        jsi = JSInterpreter('function f(){return 11 >> 2;}')
-        self.assertEqual(jsi.call_function('f'), 2)
+        self._test('function f(){return 1 << 5;}', 32)
+        self._test('function f(){return 2 ** 5}', 32)
+        self._test('function f(){return 19 & 21;}', 17)
+        self._test('function f(){return 11 >> 2;}', 2)
+        self._test('function f(){return []? 2+3: 4;}', 5)
+        self._test('function f(){return 1 == 2}', False)
+        self._test('function f(){return 0 && 1 || 2;}', 2)
+        self._test('function f(){return 0 ?? 42;}', 0)
+        self._test('function f(){return "life, the universe and everything" < 42;}', False)
 
     def test_array_access(self):
-        jsi = JSInterpreter('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2] = 7; return x;}')
-        self.assertEqual(jsi.call_function('f'), [5, 2, 7])
+        self._test('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}', [5, 2, 7])
 
     def test_parens(self):
-        jsi = JSInterpreter('function f(){return (1) + (2) * ((( (( (((((3)))))) )) ));}')
-        self.assertEqual(jsi.call_function('f'), 7)
-
-        jsi = JSInterpreter('function f(){return (1 + 2) * 3;}')
-        self.assertEqual(jsi.call_function('f'), 9)
+        self._test('function f(){return (1) + (2) * ((( (( (((((3)))))) )) ));}', 7)
+        self._test('function f(){return (1 + 2) * 3;}', 9)
+
+    def test_quotes(self):
+        self._test(R'function f(){return "a\"\\("}', R'a"\(')
 
     def test_assignments(self):
-        jsi = JSInterpreter('function f(){var x = 20; x = 30 + 1; return x;}')
-        self.assertEqual(jsi.call_function('f'), 31)
-
-        jsi = JSInterpreter('function f(){var x = 20; x += 30 + 1; return x;}')
-        self.assertEqual(jsi.call_function('f'), 51)
-
-        jsi = JSInterpreter('function f(){var x = 20; x -= 30 + 1; return x;}')
-        self.assertEqual(jsi.call_function('f'), -11)
+        self._test('function f(){var x = 20; x = 30 + 1; return x;}', 31)
+        self._test('function f(){var x = 20; x += 30 + 1; return x;}', 51)
+        self._test('function f(){var x = 20; x -= 30 + 1; return x;}', -11)
 
+    @unittest.skip('Not implemented')
     def test_comments(self):
-        'Skipping: Not yet fully implemented'
-        return
-        jsi = JSInterpreter('''
-            function x() {
-                var x = /* 1 + */ 2;
-                var y = /* 30
-                * 40 */ 50;
-                return x + y;
-            }
-        ''')
-        self.assertEqual(jsi.call_function('x'), 52)
+        self._test('''
+            function f() {
+                var x = /* 1 + */ 2;
+                var y = /* 30
+                * 40 */ 50;
+                return x + y;
+            }
+        ''', 52)
 
-        jsi = JSInterpreter('''
-            function f() {
-                var x = "/*";
-                var y = 1 /* comment */ + 2;
-                return y;
-            }
-        ''')
-        self.assertEqual(jsi.call_function('f'), 3)
+        self._test('''
+            function f() {
+                var x = "/*";
+                var y = 1 /* comment */ + 2;
+                return y;
+            }
+        ''', 3)
 
     def test_precedence(self):
-        jsi = JSInterpreter('''
-            function x() {
-                var a = [10, 20, 30, 40, 50];
-                var b = 6;
-                a[0]=a[b%a.length];
-                return a;
-            }''')
-        self.assertEqual(jsi.call_function('x'), [20, 20, 30, 40, 50])
+        self._test('''
+            function f() {
+                var a = [10, 20, 30, 40, 50];
+                var b = 6;
+                a[0]=a[b%a.length];
+                return a;
+            }
+        ''', [20, 20, 30, 40, 50])
+
+    def test_builtins(self):
+        self._test('function f() { return NaN }', NaN)
+
+    def test_date(self):
+        self._test('function f() { return new Date("Wednesday 31 December 1969 18:01:26 MDT") - 0; }', 86000)
+
+        jsi = JSInterpreter('function f(dt) { return new Date(dt) - 0; }')
+        self._test(jsi, 86000, args=['Wednesday 31 December 1969 18:01:26 MDT'])
+        self._test(jsi, 86000, args=['12/31/1969 18:01:26 MDT'])  # m/d/y
+        self._test(jsi, 0, args=['1 January 1970 00:00:00 UTC'])
 
     def test_call(self):
         jsi = JSInterpreter('''
             function x() { return 2; }
-            function y(a) { return x() + a; }
+            function y(a) { return x() + (a?a:0); }
             function z() { return y(3); }
         ''')
-        self.assertEqual(jsi.call_function('z'), 5)
+        self._test(jsi, 5, func='z')
+        self._test(jsi, 2, func='y')
+
+    def test_if(self):
+        self._test('''
+            function f() {
+                let a = 9;
+                if (0==0) {a++}
+                return a
+            }
+        ''', 10)
+
+        self._test('''
+            function f() {
+                if (0==0) {return 10}
+            }
+        ''', 10)
+
+        self._test('''
+            function f() {
+                if (0!=0) {return 1}
+                else {return 10}
+            }
+        ''', 10)
+
+        """  # Unsupported
+        self._test('''
+            function f() {
+                if (0!=0) {return 1}
+                else if (1==0) {return 2}
+                else {return 10}
+            }
+        ''', 10)
+        """
 
     def test_for_loop(self):
-        jsi = JSInterpreter('''
-            function x() { a=0; for (i=0; i-10; i++) {a++} a }
-        ''')
-        self.assertEqual(jsi.call_function('x'), 10)
+        self._test('function f() { a=0; for (i=0; i-10; i++) {a++} return a }', 10)
 
     def test_switch(self):
         jsi = JSInterpreter('''
-            function x(f) { switch(f){
-                case 1:f+=1;
-                case 2:f+=2;
-                case 3:f+=3;break;
-                case 4:f+=4;
-                default:f=0;
-            } return f }
+            function f(x) { switch(x){
+                case 1:x+=1;
+                case 2:x+=2;
+                case 3:x+=3;break;
+                case 4:x+=4;
+                default:x=0;
+            } return x }
         ''')
-        self.assertEqual(jsi.call_function('x', 1), 7)
-        self.assertEqual(jsi.call_function('x', 3), 6)
-        self.assertEqual(jsi.call_function('x', 5), 0)
+        self._test(jsi, 7, args=[1])
+        self._test(jsi, 6, args=[3])
+        self._test(jsi, 0, args=[5])
 
     def test_switch_default(self):
         jsi = JSInterpreter('''
|
jsi = JSInterpreter('''
|
||||||
function x(f) { switch(f){
|
function f(x) { switch(x){
|
||||||
case 2: f+=2;
|
case 2: x+=2;
|
||||||
default: f-=1;
|
default: x-=1;
|
||||||
case 5:
|
case 5:
|
||||||
case 6: f+=6;
|
case 6: x+=6;
|
||||||
case 0: break;
|
case 0: break;
|
||||||
case 1: f+=1;
|
case 1: x+=1;
|
||||||
} return f }
|
} return x }
|
||||||
''')
|
''')
|
||||||
self.assertEqual(jsi.call_function('x', 1), 2)
|
self._test(jsi, 2, args=[1])
|
||||||
self.assertEqual(jsi.call_function('x', 5), 11)
|
self._test(jsi, 11, args=[5])
|
||||||
self.assertEqual(jsi.call_function('x', 9), 14)
|
self._test(jsi, 14, args=[9])
|
||||||
|
|
||||||
def test_try(self):
|
def test_try(self):
|
||||||
jsi = JSInterpreter('''
|
self._test('function f() { try{return 10} catch(e){return 5} }', 10)
|
||||||
function x() { try{return 10} catch(e){return 5} }
|
|
||||||
''')
|
def test_catch(self):
|
||||||
self.assertEqual(jsi.call_function('x'), 10)
|
self._test('function f() { try{throw 10} catch(e){return 5} }', 5)
|
||||||
|
|
||||||
|
def test_finally(self):
|
||||||
|
self._test('function f() { try{throw 10} finally {return 42} }', 42)
|
||||||
|
self._test('function f() { try{throw 10} catch(e){return 5} finally {return 42} }', 42)
|
||||||
|
|
||||||
|
def test_nested_try(self):
|
||||||
|
self._test('''
|
||||||
|
function f() {try {
|
||||||
|
try{throw 10} finally {throw 42}
|
||||||
|
} catch(e){return 5} }
|
||||||
|
''', 5)
|
||||||
|
|
||||||
def test_for_loop_continue(self):
|
def test_for_loop_continue(self):
|
||||||
jsi = JSInterpreter('''
|
self._test('function f() { a=0; for (i=0; i-10; i++) { continue; a++ } return a }', 0)
|
||||||
function x() { a=0; for (i=0; i-10; i++) { continue; a++ } a }
|
|
||||||
''')
|
|
||||||
self.assertEqual(jsi.call_function('x'), 0)
|
|
||||||
|
|
||||||
def test_for_loop_break(self):
|
def test_for_loop_break(self):
|
||||||
jsi = JSInterpreter('''
|
self._test('function f() { a=0; for (i=0; i-10; i++) { break; a++ } return a }', 0)
|
||||||
function x() { a=0; for (i=0; i-10; i++) { break; a++ } a }
|
|
||||||
''')
|
def test_for_loop_try(self):
|
||||||
self.assertEqual(jsi.call_function('x'), 0)
|
self._test('''
|
||||||
|
function f() {
|
||||||
|
for (i=0; i-10; i++) { try { if (i == 5) throw i} catch {return 10} finally {break} };
|
||||||
|
return 42 }
|
||||||
|
''', 42)
|
||||||
|
|
||||||
def test_literal_list(self):
|
def test_literal_list(self):
|
||||||
jsi = JSInterpreter('''
|
self._test('function f() { return [1, 2, "asdf", [5, 6, 7]][3] }', [5, 6, 7])
|
||||||
function x() { [1, 2, "asdf", [5, 6, 7]][3] }
|
|
||||||
''')
|
|
||||||
self.assertEqual(jsi.call_function('x'), [5, 6, 7])
|
|
||||||
|
|
||||||
def test_comma(self):
|
def test_comma(self):
|
||||||
|
self._test('function f() { a=5; a -= 1, a+=3; return a }', 7)
|
||||||
|
self._test('function f() { a=5; return (a -= 1, a+=3, a); }', 7)
|
||||||
|
self._test('function f() { return (l=[0,1,2,3], function(a, b){return a+b})((l[1], l[2]), l[3]) }', 5)
|
||||||
|
|
||||||
|
def test_void(self):
|
||||||
|
self._test('function f() { return void 42; }', None)
|
||||||
|
|
||||||
|
def test_return_function(self):
|
||||||
jsi = JSInterpreter('''
|
jsi = JSInterpreter('''
|
||||||
function x() { a=5; a -= 1, a+=3; return a }
|
function f() { return [1, function(){return 1}][1] }
|
||||||
''')
|
''')
|
||||||
self.assertEqual(jsi.call_function('x'), 7)
|
self.assertEqual(jsi.call_function('f')([]), 1)
|
||||||
|
|
||||||
|
def test_null(self):
|
||||||
|
self._test('function f() { return null; }', None)
|
||||||
|
self._test('function f() { return [null > 0, null < 0, null == 0, null === 0]; }',
|
||||||
|
[False, False, False, False])
|
||||||
|
self._test('function f() { return [null >= 0, null <= 0]; }', [True, True])
|
||||||
|
|
||||||
|
def test_undefined(self):
|
||||||
|
self._test('function f() { return undefined === undefined; }', True)
|
||||||
|
self._test('function f() { return undefined; }', JS_Undefined)
|
||||||
|
self._test('function f() {return undefined ?? 42; }', 42)
|
||||||
|
self._test('function f() { let v; return v; }', JS_Undefined)
|
||||||
|
self._test('function f() { let v; return v**0; }', 1)
|
||||||
|
self._test('function f() { let v; return [v>42, v<=42, v&&42, 42&&v]; }',
|
||||||
|
[False, False, JS_Undefined, JS_Undefined])
|
||||||
|
|
||||||
|
self._test('''
|
||||||
|
function f() { return [
|
||||||
|
undefined === undefined,
|
||||||
|
undefined == undefined,
|
||||||
|
undefined == null,
|
||||||
|
undefined < undefined,
|
||||||
|
undefined > undefined,
|
||||||
|
undefined === 0,
|
||||||
|
undefined == 0,
|
||||||
|
undefined < 0,
|
||||||
|
undefined > 0,
|
||||||
|
undefined >= 0,
|
||||||
|
undefined <= 0,
|
||||||
|
undefined > null,
|
||||||
|
undefined < null,
|
||||||
|
undefined === null
|
||||||
|
]; }
|
||||||
|
''', list(map(bool, (1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))))
|
||||||
|
|
||||||
|
jsi = JSInterpreter('''
|
||||||
|
function f() { let v; return [42+v, v+42, v**42, 42**v, 0**v]; }
|
||||||
|
''')
|
||||||
|
for y in jsi.call_function('f'):
|
||||||
|
self.assertTrue(math.isnan(y))
|
||||||
|
|
||||||
|
def test_object(self):
|
||||||
|
self._test('function f() { return {}; }', {})
|
||||||
|
self._test('function f() { let a = {m1: 42, m2: 0 }; return [a["m1"], a.m2]; }', [42, 0])
|
||||||
|
self._test('function f() { let a; return a?.qq; }', JS_Undefined)
|
||||||
|
self._test('function f() { let a = {m1: 42, m2: 0 }; return a?.qq; }', JS_Undefined)
|
||||||
|
|
||||||
|
def test_regex(self):
|
||||||
|
self._test('function f() { let a=/,,[/,913,/](,)}/; }', None)
|
||||||
|
self._test('function f() { let a=/,,[/,913,/](,)}/; return a; }', R'/,,[/,913,/](,)}/0')
|
||||||
|
|
||||||
|
R''' # We are not compiling regex
|
||||||
|
jsi = JSInterpreter('function f() { let a=/,,[/,913,/](,)}/; return a; }')
|
||||||
|
self.assertIsInstance(jsi.call_function('f'), re.Pattern)
|
||||||
|
|
||||||
|
jsi = JSInterpreter('function f() { let a=/,,[/,913,/](,)}/i; return a; }')
|
||||||
|
self.assertEqual(jsi.call_function('f').flags & re.I, re.I)
|
||||||
|
|
||||||
|
jsi = JSInterpreter(R'function f() { let a=/,][}",],()}(\[)/; return a; }')
|
||||||
|
self.assertEqual(jsi.call_function('f').pattern, r',][}",],()}(\[)')
|
||||||
|
|
||||||
|
jsi = JSInterpreter(R'function f() { let a=[/[)\\]/]; return a[0]; }')
|
||||||
|
self.assertEqual(jsi.call_function('f').pattern, r'[)\\]')
|
||||||
|
'''
|
||||||
|
|
||||||
|
@unittest.skip('Not implemented')
|
||||||
|
def test_replace(self):
|
||||||
|
self._test('function f() { let a="data-name".replace("data-", ""); return a }',
|
||||||
|
'name')
|
||||||
|
self._test('function f() { let a="data-name".replace(new RegExp("^.+-"), ""); return a; }',
|
||||||
|
'name')
|
||||||
|
self._test('function f() { let a="data-name".replace(/^.+-/, ""); return a; }',
|
||||||
|
'name')
|
||||||
|
self._test('function f() { let a="data-name".replace(/a/g, "o"); return a; }',
|
||||||
|
'doto-nome')
|
||||||
|
self._test('function f() { let a="data-name".replaceAll("a", "o"); return a; }',
|
||||||
|
'doto-nome')
|
||||||
|
|
||||||
|
def test_char_code_at(self):
|
||||||
|
jsi = JSInterpreter('function f(i){return "test".charCodeAt(i)}')
|
||||||
|
self._test(jsi, 116, args=[0])
|
||||||
|
self._test(jsi, 101, args=[1])
|
||||||
|
self._test(jsi, 115, args=[2])
|
||||||
|
self._test(jsi, 116, args=[3])
|
||||||
|
self._test(jsi, None, args=[4])
|
||||||
|
self._test(jsi, 116, args=['not_a_number'])
|
||||||
|
|
||||||
|
def test_bitwise_operators_overflow(self):
|
||||||
|
self._test('function f(){return -524999584 << 5}', 379882496)
|
||||||
|
self._test('function f(){return 1236566549 << 5}', 915423904)
|
||||||
|
|
||||||
|
def test_bitwise_operators_typecast(self):
|
||||||
|
self._test('function f(){return null << 5}', 0)
|
||||||
|
self._test('function f(){return undefined >> 5}', 0)
|
||||||
|
self._test('function f(){return 42 << NaN}', 42)
|
||||||
|
|
||||||
|
def test_negative(self):
|
||||||
|
self._test('function f(){return 2 * -2.0 ;}', -4)
|
||||||
|
self._test('function f(){return 2 - - -2 ;}', 0)
|
||||||
|
self._test('function f(){return 2 - - - -2 ;}', 4)
|
||||||
|
self._test('function f(){return 2 - + + - -2;}', 0)
|
||||||
|
self._test('function f(){return 2 + - + - -2;}', 0)
|
||||||
|
|
||||||
|
@unittest.skip('Not implemented')
|
||||||
|
def test_packed(self):
|
||||||
|
jsi = JSInterpreter('''function f(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}''')
|
||||||
|
self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|')))


 if __name__ == '__main__':
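The test_packed payload above is the common P.A.C.K.E.R. obfuscation: a word dictionary joined by '|' plus a bootstrap that substitutes each word back in for its base-36 token. As a rough illustration of what that bootstrap does, here is a minimal standalone Python sketch; js_base and unpack are names invented for this sketch, and it is not yt-dlp's implementation (the test exercises JSInterpreter directly on the original JS):

import re


def js_base(n, base, digits='0123456789abcdefghijklmnopqrstuvwxyz'):
    # integer -> string in the given base, like JS Number.prototype.toString(base)
    if n == 0:
        return digits[0]
    out = ''
    while n:
        n, rem = divmod(n, base)
        out = digits[rem] + out
    return out


def unpack(p, a, c, k):
    # Mirrors the JS bootstrap:
    #   while(c--) if(k[c]) p = p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'), k[c])
    while c:
        c -= 1
        if k[c]:
            # function replacement avoids re-interpreting backslashes in k[c]
            p = re.sub(r'\b%s\b' % re.escape(js_base(c, a)), lambda _, s=k[c]: s, p)
    return p

With the arguments used in the test this would be called as unpack(payload, 36, 270, words.split('|')).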
test/test_networking.py — 1439 lines, new file (diff suppressed because it is too large)

test/test_networking_utils.py — 282 lines, new file
@@ -0,0 +1,282 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys

import pytest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import contextlib
import io
import platform
import random
import ssl
import urllib.error
import warnings

from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.dependencies import certifi
from yt_dlp.networking import Response
from yt_dlp.networking._helper import (
    InstanceStoreMixin,
    add_accept_encoding_header,
    get_redirect_method,
    make_socks_proxy_opts,
    select_proxy,
    ssl_load_certs,
)
from yt_dlp.networking.exceptions import (
    HTTPError,
    IncompleteRead,
    _CompatHTTPError,
)
from yt_dlp.socks import ProxyType
from yt_dlp.utils.networking import HTTPHeaderDict

TEST_DIR = os.path.dirname(os.path.abspath(__file__))


class TestNetworkingUtils:

    def test_select_proxy(self):
        proxies = {
            'all': 'socks5://example.com',
            'http': 'http://example.com:1080',
            'no': 'bypass.example.com,yt-dl.org'
        }

        assert select_proxy('https://example.com', proxies) == proxies['all']
        assert select_proxy('http://example.com', proxies) == proxies['http']
        assert select_proxy('http://bypass.example.com', proxies) is None
        assert select_proxy('https://yt-dl.org', proxies) is None

    @pytest.mark.parametrize('socks_proxy,expected', [
        ('socks5h://example.com', {
            'proxytype': ProxyType.SOCKS5,
            'addr': 'example.com',
            'port': 1080,
            'rdns': True,
            'username': None,
            'password': None
        }),
        ('socks5://user:@example.com:5555', {
            'proxytype': ProxyType.SOCKS5,
            'addr': 'example.com',
            'port': 5555,
            'rdns': False,
            'username': 'user',
            'password': ''
        }),
        ('socks4://u%40ser:pa%20ss@127.0.0.1:1080', {
            'proxytype': ProxyType.SOCKS4,
            'addr': '127.0.0.1',
            'port': 1080,
            'rdns': False,
            'username': 'u@ser',
            'password': 'pa ss'
        }),
        ('socks4a://:pa%20ss@127.0.0.1', {
            'proxytype': ProxyType.SOCKS4A,
            'addr': '127.0.0.1',
            'port': 1080,
            'rdns': True,
            'username': '',
            'password': 'pa ss'
        })
    ])
    def test_make_socks_proxy_opts(self, socks_proxy, expected):
        assert make_socks_proxy_opts(socks_proxy) == expected

    def test_make_socks_proxy_unknown(self):
        with pytest.raises(ValueError, match='Unknown SOCKS proxy version: socks'):
            make_socks_proxy_opts('socks://127.0.0.1')

    @pytest.mark.skipif(not certifi, reason='certifi is not installed')
    def test_load_certifi(self):
        context_certifi = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context_certifi.load_verify_locations(cafile=certifi.where())
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        ssl_load_certs(context, use_certifi=True)
        assert context.get_ca_certs() == context_certifi.get_ca_certs()

        context_default = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context_default.load_default_certs()
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        ssl_load_certs(context, use_certifi=False)
        assert context.get_ca_certs() == context_default.get_ca_certs()

        if context_default.get_ca_certs() == context_certifi.get_ca_certs():
            pytest.skip('System uses certifi as default. The test is not valid')

    @pytest.mark.parametrize('method,status,expected', [
        ('GET', 303, 'GET'),
        ('HEAD', 303, 'HEAD'),
        ('PUT', 303, 'GET'),
        ('POST', 301, 'GET'),
        ('HEAD', 301, 'HEAD'),
        ('POST', 302, 'GET'),
        ('HEAD', 302, 'HEAD'),
        ('PUT', 302, 'PUT'),
        ('POST', 308, 'POST'),
        ('POST', 307, 'POST'),
        ('HEAD', 308, 'HEAD'),
        ('HEAD', 307, 'HEAD'),
    ])
    def test_get_redirect_method(self, method, status, expected):
        assert get_redirect_method(method, status) == expected

    @pytest.mark.parametrize('headers,supported_encodings,expected', [
        ({'Accept-Encoding': 'br'}, ['gzip', 'br'], {'Accept-Encoding': 'br'}),
        ({}, ['gzip', 'br'], {'Accept-Encoding': 'gzip, br'}),
        ({'Content-type': 'application/json'}, [], {'Content-type': 'application/json', 'Accept-Encoding': 'identity'}),
    ])
    def test_add_accept_encoding_header(self, headers, supported_encodings, expected):
        headers = HTTPHeaderDict(headers)
        add_accept_encoding_header(headers, supported_encodings)
        assert headers == HTTPHeaderDict(expected)


class TestInstanceStoreMixin:

    class FakeInstanceStoreMixin(InstanceStoreMixin):
        def _create_instance(self, **kwargs):
            return random.randint(0, 1000000)

        def _close_instance(self, instance):
            pass

    def test_mixin(self):
        mixin = self.FakeInstanceStoreMixin()
        assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}}) == mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}})

        assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'e', 4}}) != mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}})

        assert mixin._get_instance(d={'a': 1, 'b': 2, 'c': {'d', 4}}) != mixin._get_instance(d={'a': 1, 'b': 2, 'g': {'d', 4}})

        assert mixin._get_instance(d={'a': 1}, e=[1, 2, 3]) == mixin._get_instance(d={'a': 1}, e=[1, 2, 3])

        assert mixin._get_instance(d={'a': 1}, e=[1, 2, 3]) != mixin._get_instance(d={'a': 1}, e=[1, 2, 3, 4])

        cookiejar = YoutubeDLCookieJar()
        assert mixin._get_instance(b=[1, 2], c=cookiejar) == mixin._get_instance(b=[1, 2], c=cookiejar)

        assert mixin._get_instance(b=[1, 2], c=cookiejar) != mixin._get_instance(b=[1, 2], c=YoutubeDLCookieJar())

        # Different order
        assert mixin._get_instance(c=cookiejar, b=[1, 2]) == mixin._get_instance(b=[1, 2], c=cookiejar)

        m = mixin._get_instance(t=1234)
        assert mixin._get_instance(t=1234) == m
        mixin._clear_instances()
        assert mixin._get_instance(t=1234) != m


class TestNetworkingExceptions:

    @staticmethod
    def create_response(status):
        return Response(fp=io.BytesIO(b'test'), url='http://example.com', headers={'tesT': 'test'}, status=status)

    @pytest.mark.parametrize('http_error_class', [HTTPError, lambda r: _CompatHTTPError(HTTPError(r))])
    def test_http_error(self, http_error_class):

        response = self.create_response(403)
        error = http_error_class(response)

        assert error.status == 403
        assert str(error) == error.msg == 'HTTP Error 403: Forbidden'
        assert error.reason == response.reason
        assert error.response is response

        data = error.response.read()
        assert data == b'test'
        assert repr(error) == '<HTTPError 403: Forbidden>'

    @pytest.mark.parametrize('http_error_class', [HTTPError, lambda *args, **kwargs: _CompatHTTPError(HTTPError(*args, **kwargs))])
    def test_redirect_http_error(self, http_error_class):
        response = self.create_response(301)
        error = http_error_class(response, redirect_loop=True)
        assert str(error) == error.msg == 'HTTP Error 301: Moved Permanently (redirect loop detected)'
        assert error.reason == 'Moved Permanently'

    def test_compat_http_error(self):
        response = self.create_response(403)
        error = _CompatHTTPError(HTTPError(response))
        assert isinstance(error, HTTPError)
        assert isinstance(error, urllib.error.HTTPError)

        @contextlib.contextmanager
        def raises_deprecation_warning():
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                yield

                if len(w) == 0:
                    pytest.fail('Did not raise DeprecationWarning')
                if len(w) > 1:
                    pytest.fail(f'Raised multiple warnings: {w}')

                if not issubclass(w[-1].category, DeprecationWarning):
                    pytest.fail(f'Expected DeprecationWarning, got {w[-1].category}')
                w.clear()

        with raises_deprecation_warning():
            assert error.code == 403

        with raises_deprecation_warning():
            assert error.getcode() == 403

        with raises_deprecation_warning():
            assert error.hdrs is error.response.headers

        with raises_deprecation_warning():
            assert error.info() is error.response.headers

        with raises_deprecation_warning():
            assert error.headers is error.response.headers

        with raises_deprecation_warning():
            assert error.filename == error.response.url

        with raises_deprecation_warning():
            assert error.url == error.response.url

        with raises_deprecation_warning():
            assert error.geturl() == error.response.url

        # Passthrough file operations
        with raises_deprecation_warning():
            assert error.read() == b'test'

        with raises_deprecation_warning():
            assert not error.closed

        with raises_deprecation_warning():
            # Technically Response operations are also passed through, which should not be used.
            assert error.get_header('test') == 'test'

        # Should not raise a warning
        error.close()

    @pytest.mark.skipif(
        platform.python_implementation() == 'PyPy', reason='garbage collector works differently in pypy')
    def test_compat_http_error_autoclose(self):
        # Compat HTTPError should not autoclose response
        response = self.create_response(403)
        _CompatHTTPError(HTTPError(response))
        assert not response.closed

    def test_incomplete_read_error(self):
        error = IncompleteRead(4, 3, cause='test')
        assert isinstance(error, IncompleteRead)
        assert repr(error) == '<IncompleteRead: 4 bytes read, 3 more expected>'
        assert str(error) == error.msg == '4 bytes read, 3 more expected'
        assert error.partial == 4
        assert error.expected == 3
        assert error.cause == 'test'

        error = IncompleteRead(3)
        assert repr(error) == '<IncompleteRead: 3 bytes read>'
        assert str(error) == '3 bytes read'
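The test_get_redirect_method table above encodes the usual HTTP redirect-method rules: HEAD is never rewritten, 303 downgrades everything else to GET, 301/302 downgrade only POST, and 307/308 always preserve the method. A minimal sketch of logic consistent with those parametrized cases — an illustration, not yt-dlp's actual _helper implementation, and the function name is invented:

def get_redirect_method_sketch(method: str, status: int) -> str:
    # HEAD is never rewritten; 303 forces GET for everything else;
    # 301/302 historically downgrade only POST; 307/308 keep the method.
    if method == 'HEAD':
        return 'HEAD'
    if status == 303 or (status in (301, 302) and method == 'POST'):
        return 'GET'
    return method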
test/test_plugins.py — 73 lines, new file
@@ -0,0 +1,73 @@
import importlib
import os
import shutil
import sys
import unittest
from pathlib import Path

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
TEST_DATA_DIR = Path(os.path.dirname(os.path.abspath(__file__)), 'testdata')
sys.path.append(str(TEST_DATA_DIR))
importlib.invalidate_caches()

from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins


class TestPlugins(unittest.TestCase):

    TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME

    def test_directories_containing_plugins(self):
        self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))

    def test_extractor_classes(self):
        for module_name in tuple(sys.modules):
            if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
                del sys.modules[module_name]
        plugins_ie = load_plugins('extractor', 'IE')

        self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
        self.assertIn('NormalPluginIE', plugins_ie.keys())

        # don't load modules with underscore prefix
        self.assertFalse(
            f'{PACKAGE_NAME}.extractor._ignore' in sys.modules.keys(),
            'loaded module beginning with underscore')
        self.assertNotIn('IgnorePluginIE', plugins_ie.keys())

        # Don't load extractors with underscore prefix
        self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())

        # Don't load extractors not specified in __all__ (if supplied)
        self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
        self.assertIn('InAllPluginIE', plugins_ie.keys())

    def test_postprocessor_classes(self):
        plugins_pp = load_plugins('postprocessor', 'PP')
        self.assertIn('NormalPluginPP', plugins_pp.keys())

    def test_importing_zipped_module(self):
        zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'
        shutil.make_archive(str(zip_path)[:-4], 'zip', str(zip_path)[:-4])
        sys.path.append(str(zip_path))  # add zip to search paths
        importlib.invalidate_caches()  # reset the import caches

        try:
            for plugin_type in ('extractor', 'postprocessor'):
                package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
                self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

            plugins_ie = load_plugins('extractor', 'IE')
            self.assertIn('ZippedPluginIE', plugins_ie.keys())

            plugins_pp = load_plugins('postprocessor', 'PP')
            self.assertIn('ZippedPluginPP', plugins_pp.keys())

        finally:
            sys.path.remove(str(zip_path))
            os.remove(zip_path)
            importlib.invalidate_caches()  # reset the import caches


if __name__ == '__main__':
    unittest.main()
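The plugin tests above load classes from a PACKAGE_NAME (yt_dlp_plugins) namespace package found on sys.path. For orientation, a minimal extractor plugin satisfying load_plugins('extractor', 'IE') might be laid out as below; the module path and class body are illustrative, mirroring the test fixture's NormalPluginIE — only the naming rules (IE suffix, no leading underscore, listed in __all__ if one is supplied) come from the tests:

# yt_dlp_plugins/extractor/normal.py  (inside a directory or zip on sys.path)
from yt_dlp.extractor.common import InfoExtractor


class NormalPluginIE(InfoExtractor):
    # Picked up because the class name ends in 'IE' and neither the
    # module nor the class name has an underscore prefix.
    _VALID_URL = r'https?://example\.com/video/(?P<id>\d+)'  # hypothetical site

    def _real_extract(self, url):
        video_id = self._match_id(url)
        return {'id': video_id, 'title': f'example {video_id}', 'url': url}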
test/test_postprocessors.py
@@ -16,6 +16,7 @@ from yt_dlp.postprocessor import (
     MetadataFromFieldPP,
     MetadataParserPP,
     ModifyChaptersPP,
+    SponsorBlockPP,
 )

@@ -76,11 +77,15 @@ class TestModifyChaptersPP(unittest.TestCase):
         self._pp = ModifyChaptersPP(YoutubeDL())

     @staticmethod
-    def _sponsor_chapter(start, end, cat, remove=False):
-        c = {'start_time': start, 'end_time': end, '_categories': [(cat, start, end)]}
-        if remove:
-            c['remove'] = True
-        return c
+    def _sponsor_chapter(start, end, cat, remove=False, title=None):
+        if title is None:
+            title = SponsorBlockPP.CATEGORIES[cat]
+        return {
+            'start_time': start,
+            'end_time': end,
+            '_categories': [(cat, start, end, title)],
+            **({'remove': True} if remove else {}),
+        }

     @staticmethod
     def _chapter(start, end, title=None, remove=False):

@@ -130,6 +135,19 @@ class TestModifyChaptersPP(unittest.TestCase):
             'c', '[SponsorBlock]: Filler Tangent', 'c'])
         self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

+    def test_remove_marked_arrange_sponsors_SponsorBlockChapters(self):
+        chapters = self._chapters([70], ['c']) + [
+            self._sponsor_chapter(10, 20, 'chapter', title='sb c1'),
+            self._sponsor_chapter(15, 16, 'chapter', title='sb c2'),
+            self._sponsor_chapter(30, 40, 'preview'),
+            self._sponsor_chapter(50, 60, 'filler')]
+        expected = self._chapters(
+            [10, 15, 16, 20, 30, 40, 50, 60, 70],
+            ['c', '[SponsorBlock]: sb c1', '[SponsorBlock]: sb c1, sb c2', '[SponsorBlock]: sb c1',
+             'c', '[SponsorBlock]: Preview/Recap',
+             'c', '[SponsorBlock]: Filler Tangent', 'c'])
+        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
+
     def test_remove_marked_arrange_sponsors_UniqueNamesForOverlappingSponsors(self):
         chapters = self._chapters([120], ['c']) + [
             self._sponsor_chapter(10, 45, 'sponsor'), self._sponsor_chapter(20, 40, 'selfpromo'),

@@ -173,7 +191,7 @@ class TestModifyChaptersPP(unittest.TestCase):
         self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

     def test_remove_marked_arrange_sponsors_ChapterWithCutHidingSponsor(self):
-        cuts = [self._sponsor_chapter(20, 50, 'selpromo', remove=True)]
+        cuts = [self._sponsor_chapter(20, 50, 'selfpromo', remove=True)]
         chapters = self._chapters([60], ['c']) + [
             self._sponsor_chapter(10, 20, 'intro'),
             self._sponsor_chapter(30, 40, 'sponsor'),

@@ -199,7 +217,7 @@ class TestModifyChaptersPP(unittest.TestCase):
             self._sponsor_chapter(10, 20, 'sponsor'),
             self._sponsor_chapter(20, 30, 'interaction', remove=True),
             self._chapter(30, 40, remove=True),
-            self._sponsor_chapter(40, 50, 'selpromo', remove=True),
+            self._sponsor_chapter(40, 50, 'selfpromo', remove=True),
             self._sponsor_chapter(50, 60, 'interaction')]
         expected = self._chapters([10, 20, 30, 40],
                                   ['c', '[SponsorBlock]: Sponsor',

@@ -282,7 +300,7 @@ class TestModifyChaptersPP(unittest.TestCase):
         chapters = self._chapters([70], ['c']) + [
             self._sponsor_chapter(10, 30, 'sponsor'),
             self._sponsor_chapter(20, 50, 'interaction'),
-            self._sponsor_chapter(30, 50, 'selpromo', remove=True),
+            self._sponsor_chapter(30, 50, 'selfpromo', remove=True),
             self._sponsor_chapter(40, 60, 'sponsor'),
             self._sponsor_chapter(50, 60, 'interaction')]
         expected = self._chapters(
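The _sponsor_chapter change above now always attaches a title to each category tuple, defaulting to SponsorBlockPP.CATEGORIES[cat], so overlapping SponsorBlock segments can carry custom names. Worked examples of the helper's new return value, with arguments taken from the tests above (shown as illustrative REPL-style comments):

# A titled 'chapter' segment:
_sponsor_chapter(10, 20, 'chapter', title='sb c1')
# -> {'start_time': 10, 'end_time': 20,
#     '_categories': [('chapter', 10, 20, 'sb c1')]}

# With remove=True a 'remove' flag is merged into the dict:
_sponsor_chapter(20, 50, 'selfpromo', remove=True)
# -> {'start_time': 20, 'end_time': 50,
#     '_categories': [('selfpromo', 20, 50, SponsorBlockPP.CATEGORIES['selfpromo'])],
#     'remove': True}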
test/test_socks.py — rewritten (113 lines removed, 450 added)
@@ -1,113 +1,450 @@

Old version (removed):

#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import random
import subprocess
import urllib.request

from test.helper import FakeYDL, get_params, is_download_test


@is_download_test
class TestMultipleSocks(unittest.TestCase):
    @staticmethod
    def _check_params(attrs):
        params = get_params()
        for attr in attrs:
            if attr not in params:
                print('Missing %s. Skipping.' % attr)
                return
        return params

    def test_proxy_http(self):
        params = self._check_params(['primary_proxy', 'primary_server_ip'])
        if params is None:
            return
        ydl = FakeYDL({
            'proxy': params['primary_proxy']
        })
        self.assertEqual(
            ydl.urlopen('http://yt-dl.org/ip').read().decode(),
            params['primary_server_ip'])

    def test_proxy_https(self):
        params = self._check_params(['primary_proxy', 'primary_server_ip'])
        if params is None:
            return
        ydl = FakeYDL({
            'proxy': params['primary_proxy']
        })
        self.assertEqual(
            ydl.urlopen('https://yt-dl.org/ip').read().decode(),
            params['primary_server_ip'])

    def test_secondary_proxy_http(self):
        params = self._check_params(['secondary_proxy', 'secondary_server_ip'])
        if params is None:
            return
        ydl = FakeYDL()
        req = urllib.request.Request('http://yt-dl.org/ip')
        req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
        self.assertEqual(
            ydl.urlopen(req).read().decode(),
            params['secondary_server_ip'])

    def test_secondary_proxy_https(self):
        params = self._check_params(['secondary_proxy', 'secondary_server_ip'])
        if params is None:
            return
        ydl = FakeYDL()
        req = urllib.request.Request('https://yt-dl.org/ip')
        req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
        self.assertEqual(
            ydl.urlopen(req).read().decode(),
            params['secondary_server_ip'])


@is_download_test
class TestSocks(unittest.TestCase):
    _SKIP_SOCKS_TEST = True

    def setUp(self):
        if self._SKIP_SOCKS_TEST:
            return

        self.port = random.randint(20000, 30000)
        self.server_process = subprocess.Popen([
            'srelay', '-f', '-i', '127.0.0.1:%d' % self.port],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    def tearDown(self):
        if self._SKIP_SOCKS_TEST:
            return

        self.server_process.terminate()
        self.server_process.communicate()

    def _get_ip(self, protocol):
        if self._SKIP_SOCKS_TEST:
            return '127.0.0.1'

        ydl = FakeYDL({
            'proxy': '%s://127.0.0.1:%d' % (protocol, self.port),
        })
        return ydl.urlopen('http://yt-dl.org/ip').read().decode()

    def test_socks4(self):
        self.assertTrue(isinstance(self._get_ip('socks4'), str))

    def test_socks4a(self):
        self.assertTrue(isinstance(self._get_ip('socks4a'), str))

    def test_socks5(self):
        self.assertTrue(isinstance(self._get_ip('socks5'), str))

New version (added; the listing ends where this page is cut off):

#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import threading
import unittest

import pytest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import abc
import contextlib
import enum
import functools
import http.server
import json
import random
import socket
import struct
import time
from socketserver import (
    BaseRequestHandler,
    StreamRequestHandler,
    ThreadingTCPServer,
)

from test.helper import http_server_port
from yt_dlp.networking import Request
from yt_dlp.networking.exceptions import ProxyError, TransportError
from yt_dlp.socks import (
    SOCKS4_REPLY_VERSION,
    SOCKS4_VERSION,
    SOCKS5_USER_AUTH_SUCCESS,
    SOCKS5_USER_AUTH_VERSION,
    SOCKS5_VERSION,
    Socks5AddressType,
    Socks5Auth,
)

SOCKS5_USER_AUTH_FAILURE = 0x1


class Socks4CD(enum.IntEnum):
    REQUEST_GRANTED = 90
    REQUEST_REJECTED_OR_FAILED = 91
    REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD = 92
    REQUEST_REJECTED_DIFFERENT_USERID = 93


class Socks5Reply(enum.IntEnum):
    SUCCEEDED = 0x0
    GENERAL_FAILURE = 0x1
    CONNECTION_NOT_ALLOWED = 0x2
    NETWORK_UNREACHABLE = 0x3
    HOST_UNREACHABLE = 0x4
    CONNECTION_REFUSED = 0x5
    TTL_EXPIRED = 0x6
    COMMAND_NOT_SUPPORTED = 0x7
    ADDRESS_TYPE_NOT_SUPPORTED = 0x8


class SocksTestRequestHandler(BaseRequestHandler):

    def __init__(self, *args, socks_info=None, **kwargs):
        self.socks_info = socks_info
        super().__init__(*args, **kwargs)


class SocksProxyHandler(BaseRequestHandler):
    def __init__(self, request_handler_class, socks_server_kwargs, *args, **kwargs):
        self.socks_kwargs = socks_server_kwargs or {}
        self.request_handler_class = request_handler_class
        super().__init__(*args, **kwargs)


class Socks5ProxyHandler(StreamRequestHandler, SocksProxyHandler):

    # SOCKS5 protocol https://tools.ietf.org/html/rfc1928
    # SOCKS5 username/password authentication https://tools.ietf.org/html/rfc1929

    def handle(self):
        sleep = self.socks_kwargs.get('sleep')
        if sleep:
            time.sleep(sleep)
        version, nmethods = self.connection.recv(2)
        assert version == SOCKS5_VERSION
        methods = list(self.connection.recv(nmethods))

        auth = self.socks_kwargs.get('auth')

        if auth is not None and Socks5Auth.AUTH_USER_PASS not in methods:
            self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_NO_ACCEPTABLE))
            self.server.close_request(self.request)
            return

        elif Socks5Auth.AUTH_USER_PASS in methods:
            self.connection.sendall(struct.pack("!BB", SOCKS5_VERSION, Socks5Auth.AUTH_USER_PASS))

            _, user_len = struct.unpack('!BB', self.connection.recv(2))
            username = self.connection.recv(user_len).decode()
            pass_len = ord(self.connection.recv(1))
            password = self.connection.recv(pass_len).decode()

            if username == auth[0] and password == auth[1]:
                self.connection.sendall(struct.pack('!BB', SOCKS5_USER_AUTH_VERSION, SOCKS5_USER_AUTH_SUCCESS))
            else:
                self.connection.sendall(struct.pack('!BB', SOCKS5_USER_AUTH_VERSION, SOCKS5_USER_AUTH_FAILURE))
                self.server.close_request(self.request)
                return

        elif Socks5Auth.AUTH_NONE in methods:
            self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_NONE))
        else:
            self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_NO_ACCEPTABLE))
            self.server.close_request(self.request)
            return

        version, command, _, address_type = struct.unpack('!BBBB', self.connection.recv(4))
        socks_info = {
            'version': version,
            'auth_methods': methods,
            'command': command,
            'client_address': self.client_address,
            'ipv4_address': None,
            'domain_address': None,
            'ipv6_address': None,
        }
        if address_type == Socks5AddressType.ATYP_IPV4:
            socks_info['ipv4_address'] = socket.inet_ntoa(self.connection.recv(4))
        elif address_type == Socks5AddressType.ATYP_DOMAINNAME:
            socks_info['domain_address'] = self.connection.recv(ord(self.connection.recv(1))).decode()
        elif address_type == Socks5AddressType.ATYP_IPV6:
            socks_info['ipv6_address'] = socket.inet_ntop(socket.AF_INET6, self.connection.recv(16))
        else:
            self.server.close_request(self.request)

        socks_info['port'] = struct.unpack('!H', self.connection.recv(2))[0]

        # dummy response, the returned IP is just a placeholder
        self.connection.sendall(struct.pack(
            '!BBBBIH', SOCKS5_VERSION, self.socks_kwargs.get('reply', Socks5Reply.SUCCEEDED), 0x0, 0x1, 0x7f000001, 40000))

        self.request_handler_class(self.request, self.client_address, self.server, socks_info=socks_info)


class Socks4ProxyHandler(StreamRequestHandler, SocksProxyHandler):

    # SOCKS4 protocol http://www.openssh.com/txt/socks4.protocol
    # SOCKS4A protocol http://www.openssh.com/txt/socks4a.protocol

    def _read_until_null(self):
        return b''.join(iter(functools.partial(self.connection.recv, 1), b'\x00'))

    def handle(self):
        sleep = self.socks_kwargs.get('sleep')
        if sleep:
            time.sleep(sleep)
        socks_info = {
            'version': SOCKS4_VERSION,
            'command': None,
            'client_address': self.client_address,
            'ipv4_address': None,
            'port': None,
            'domain_address': None,
        }
        version, command, dest_port, dest_ip = struct.unpack('!BBHI', self.connection.recv(8))
        socks_info['port'] = dest_port
        socks_info['command'] = command
        if version != SOCKS4_VERSION:
            self.server.close_request(self.request)
            return
        use_remote_dns = False
        if 0x0 < dest_ip <= 0xFF:
            use_remote_dns = True
        else:
            socks_info['ipv4_address'] = socket.inet_ntoa(struct.pack("!I", dest_ip))

        user_id = self._read_until_null().decode()
        if user_id != (self.socks_kwargs.get('user_id') or ''):
            self.connection.sendall(struct.pack(
                '!BBHI', SOCKS4_REPLY_VERSION, Socks4CD.REQUEST_REJECTED_DIFFERENT_USERID, 0x00, 0x00000000))
            self.server.close_request(self.request)
            return

        if use_remote_dns:
            socks_info['domain_address'] = self._read_until_null().decode()

        # dummy response, the returned IP is just a placeholder
        self.connection.sendall(
            struct.pack(
                '!BBHI', SOCKS4_REPLY_VERSION,
                self.socks_kwargs.get('cd_reply', Socks4CD.REQUEST_GRANTED), 40000, 0x7f000001))

        self.request_handler_class(self.request, self.client_address, self.server, socks_info=socks_info)


class IPv6ThreadingTCPServer(ThreadingTCPServer):
    address_family = socket.AF_INET6


class SocksHTTPTestRequestHandler(http.server.BaseHTTPRequestHandler, SocksTestRequestHandler):
    def do_GET(self):
        if self.path == '/socks_info':
            payload = json.dumps(self.socks_info.copy())
            self.send_response(200)
            self.send_header('Content-Type', 'application/json; charset=utf-8')
            self.send_header('Content-Length', str(len(payload)))
            self.end_headers()
            self.wfile.write(payload.encode())


@contextlib.contextmanager
def socks_server(socks_server_class, request_handler, bind_ip=None, **socks_server_kwargs):
    server = server_thread = None
    try:
        bind_address = bind_ip or '127.0.0.1'
        server_type = ThreadingTCPServer if '.' in bind_address else IPv6ThreadingTCPServer
        server = server_type(
            (bind_address, 0), functools.partial(socks_server_class, request_handler, socks_server_kwargs))
        server_port = http_server_port(server)
        server_thread = threading.Thread(target=server.serve_forever)
        server_thread.daemon = True
        server_thread.start()
        if '.' not in bind_address:
            yield f'[{bind_address}]:{server_port}'
        else:
            yield f'{bind_address}:{server_port}'
    finally:
        server.shutdown()
        server.server_close()
        server_thread.join(2.0)


class SocksProxyTestContext(abc.ABC):
    REQUEST_HANDLER_CLASS = None

    def socks_server(self, server_class, *args, **kwargs):
        return socks_server(server_class, self.REQUEST_HANDLER_CLASS, *args, **kwargs)

    @abc.abstractmethod
    def socks_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs) -> dict:
        """return a dict of socks_info"""


class HTTPSocksTestProxyContext(SocksProxyTestContext):
    REQUEST_HANDLER_CLASS = SocksHTTPTestRequestHandler

    def socks_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
        request = Request(f'http://{target_domain or "127.0.0.1"}:{target_port or "40000"}/socks_info', **req_kwargs)
        handler.validate(request)
        return json.loads(handler.send(request).read().decode())


CTX_MAP = {
    'http': HTTPSocksTestProxyContext,
}


@pytest.fixture(scope='module')
def ctx(request):
    return CTX_MAP[request.param]()


class TestSocks4Proxy:
    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks4_no_auth(self, handler, ctx):
        with handler() as rh:
            with ctx.socks_server(Socks4ProxyHandler) as server_address:
                response = ctx.socks_info_request(
                    rh, proxies={'all': f'socks4://{server_address}'})
                assert response['version'] == 4

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks4_auth(self, handler, ctx):
        with handler() as rh:
            with ctx.socks_server(Socks4ProxyHandler, user_id='user') as server_address:
                with pytest.raises(ProxyError):
                    ctx.socks_info_request(rh, proxies={'all': f'socks4://{server_address}'})
                response = ctx.socks_info_request(
                    rh, proxies={'all': f'socks4://user:@{server_address}'})
                assert response['version'] == 4

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks4a_ipv4_target(self, handler, ctx):
        with ctx.socks_server(Socks4ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
                assert response['version'] == 4
                assert (response['ipv4_address'] == '127.0.0.1') != (response['domain_address'] == '127.0.0.1')

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks4a_domain_target(self, handler, ctx):
        with ctx.socks_server(Socks4ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='localhost')
                assert response['version'] == 4
                assert response['ipv4_address'] is None
                assert response['domain_address'] == 'localhost'

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_ipv4_client_source_address(self, handler, ctx):
        with ctx.socks_server(Socks4ProxyHandler) as server_address:
            source_address = f'127.0.0.{random.randint(5, 255)}'
            with handler(proxies={'all': f'socks4://{server_address}'},
                         source_address=source_address) as rh:
                response = ctx.socks_info_request(rh)
                assert response['client_address'][0] == source_address
                assert response['version'] == 4

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    @pytest.mark.parametrize('reply_code', [
        Socks4CD.REQUEST_REJECTED_OR_FAILED,
        Socks4CD.REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD,
        Socks4CD.REQUEST_REJECTED_DIFFERENT_USERID,
    ])
    def test_socks4_errors(self, handler, ctx, reply_code):
        with ctx.socks_server(Socks4ProxyHandler, cd_reply=reply_code) as server_address:
            with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
                with pytest.raises(ProxyError):
                    ctx.socks_info_request(rh)

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_ipv6_socks4_proxy(self, handler, ctx):
        with ctx.socks_server(Socks4ProxyHandler, bind_ip='::1') as server_address:
            with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
                assert response['client_address'][0] == '::1'
                assert response['ipv4_address'] == '127.0.0.1'
                assert response['version'] == 4

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_timeout(self, handler, ctx):
        with ctx.socks_server(Socks4ProxyHandler, sleep=2) as server_address:
            with handler(proxies={'all': f'socks4://{server_address}'}, timeout=0.5) as rh:
                with pytest.raises(TransportError):
                    ctx.socks_info_request(rh)


class TestSocks5Proxy:

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks5_no_auth(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh)
                assert response['auth_methods'] == [0x0]
                assert response['version'] == 5

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks5_user_pass(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler, auth=('test', 'testpass')) as server_address:
            with handler() as rh:
                with pytest.raises(ProxyError):
                    ctx.socks_info_request(rh, proxies={'all': f'socks5://{server_address}'})

                response = ctx.socks_info_request(
                    rh, proxies={'all': f'socks5://test:testpass@{server_address}'})

                assert response['auth_methods'] == [Socks5Auth.AUTH_NONE, Socks5Auth.AUTH_USER_PASS]
                assert response['version'] == 5

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks5_ipv4_target(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
                assert response['ipv4_address'] == '127.0.0.1'
                assert response['version'] == 5

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks5_domain_target(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='localhost')
                assert (response['ipv4_address'] == '127.0.0.1') != (response['ipv6_address'] == '::1')
                assert response['version'] == 5

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks5h_domain_target(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='localhost')
                assert response['ipv4_address'] is None
                assert response['domain_address'] == 'localhost'
                assert response['version'] == 5

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks5h_ip_target(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
                assert response['ipv4_address'] == '127.0.0.1'
                assert response['domain_address'] is None
                assert response['version'] == 5

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_socks5_ipv6_destination(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler) as server_address:
            with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='[::1]')
                assert response['ipv6_address'] == '::1'
                assert response['version'] == 5

    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_ipv6_socks5_proxy(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler, bind_ip='::1') as server_address:
            with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
                response = ctx.socks_info_request(rh, target_domain='127.0.0.1')
                assert response['client_address'][0] == '::1'
                assert response['ipv4_address'] == '127.0.0.1'
                assert response['version'] == 5

    # XXX: is there any feasible way of testing IPv6 source addresses?
    # Same would go for non-proxy source_address test...
    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
    def test_ipv4_client_source_address(self, handler, ctx):
        with ctx.socks_server(Socks5ProxyHandler) as server_address:
            source_address = f'127.0.0.{random.randint(5, 255)}'
            with handler(proxies={'all': f'socks5://{server_address}'}, source_address=source_address) as rh:
                response = ctx.socks_info_request(rh)
                assert response['client_address'][0] == source_address
|
||||||
|
assert response['version'] == 5
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
||||||
|
@pytest.mark.parametrize('reply_code', [
|
||||||
|
Socks5Reply.GENERAL_FAILURE,
|
||||||
|
Socks5Reply.CONNECTION_NOT_ALLOWED,
|
||||||
|
Socks5Reply.NETWORK_UNREACHABLE,
|
||||||
|
Socks5Reply.HOST_UNREACHABLE,
|
||||||
|
Socks5Reply.CONNECTION_REFUSED,
|
||||||
|
Socks5Reply.TTL_EXPIRED,
|
||||||
|
Socks5Reply.COMMAND_NOT_SUPPORTED,
|
||||||
|
Socks5Reply.ADDRESS_TYPE_NOT_SUPPORTED,
|
||||||
|
])
|
||||||
|
def test_socks5_errors(self, handler, ctx, reply_code):
|
||||||
|
with ctx.socks_server(Socks5ProxyHandler, reply=reply_code) as server_address:
|
||||||
|
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
|
||||||
|
with pytest.raises(ProxyError):
|
||||||
|
ctx.socks_info_request(rh)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
||||||
|
def test_timeout(self, handler, ctx):
|
||||||
|
with ctx.socks_server(Socks5ProxyHandler, sleep=2) as server_address:
|
||||||
|
with handler(proxies={'all': f'socks5://{server_address}'}, timeout=1) as rh:
|
||||||
|
with pytest.raises(TransportError):
|
||||||
|
ctx.socks_info_request(rh)
|
||||||
|
|
||||||
|
|
||||||

if __name__ == '__main__':
    unittest.main()
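Aside: the `auth_methods` assertions in the tests above follow the SOCKS5 greeting defined in RFC 1928, where the client sends a version byte (0x05), a method count, and one byte per offered authentication method (0x00 is "no auth", 0x02 is username/password per RFC 1929). A minimal, self-contained sketch of parsing such a greeting, for illustration only (the suite's real parsing lives in its proxy handler helpers; `parse_socks5_greeting` is a hypothetical name):

import struct

def parse_socks5_greeting(data: bytes) -> list[int]:
    # RFC 1928 greeting: VER (0x05), NMETHODS, then NMETHODS auth-method bytes
    version, nmethods = struct.unpack('!BB', data[:2])
    assert version == 5, f'not a SOCKS5 greeting: {version}'
    return list(data[2:2 + nmethods])

# A client offering only "no auth" sends b'\x05\x01\x00'
assert parse_socks5_greeting(b'\x05\x01\x00') == [0x00]
# One also offering username/password sends b'\x05\x02\x00\x02'
assert parse_socks5_greeting(b'\x05\x02\x00\x02') == [0x00, 0x02]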
@@ -2,8 +2,10 @@
 
 # Allow direct execution
 import os
+import re
 import sys
 import unittest
+import warnings
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
@@ -12,6 +14,7 @@ import contextlib
 import io
 import itertools
 import json
+import subprocess
 import xml.etree.ElementTree
 
 from yt_dlp.compat import (
@@ -26,6 +29,7 @@ from yt_dlp.utils import (
     InAdvancePagedList,
     LazyList,
     OnDemandPagedList,
+    Popen,
     age_restricted,
     args_to_str,
     base_url,
@@ -39,19 +43,20 @@ from yt_dlp.utils import (
     datetime_from_str,
     detect_exe_version,
     determine_ext,
+    determine_file_encoding,
     dfxp2srt,
     dict_get,
     encode_base_n,
     encode_compat_str,
     encodeFilename,
-    escape_rfc3986,
-    escape_url,
     expand_path,
     extract_attributes,
+    extract_basic_auth,
     find_xpath_attr,
     fix_xml_ampersands,
     float_or_none,
     format_bytes,
+    get_compatible_ext,
     get_element_by_attribute,
     get_element_by_class,
     get_element_html_by_attribute,
@@ -99,14 +104,16 @@ from yt_dlp.utils import (
     sanitize_filename,
     sanitize_path,
     sanitize_url,
-    sanitized_Request,
     shell_quote,
     smuggle_url,
+    str_or_none,
     str_to_int,
     strip_jsonp,
     strip_or_none,
     subtitles_filename,
     timeconvert,
+    traverse_obj,
+    try_call,
     unescapeHTML,
     unified_strdate,
     unified_timestamp,
@@ -118,12 +125,19 @@ from yt_dlp.utils import (
     urlencode_postdata,
     urljoin,
     urshift,
+    variadic,
     version_tuple,
     xpath_attr,
     xpath_element,
     xpath_text,
     xpath_with_ns,
 )
+from yt_dlp.utils.networking import (
+    HTTPHeaderDict,
+    escape_rfc3986,
+    normalize_url,
+    remove_dot_segments,
+)
 
 
 class TestUtil(unittest.TestCase):
@@ -138,13 +152,13 @@
 
         self.assertEqual(sanitize_filename('123'), '123')
 
-        self.assertEqual('abc_de', sanitize_filename('abc/de'))
+        self.assertEqual('abc⧸de', sanitize_filename('abc/de'))
         self.assertFalse('/' in sanitize_filename('abc/de///'))
 
-        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
-        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
-        self.assertEqual('yes no', sanitize_filename('yes? no'))
-        self.assertEqual('this - that', sanitize_filename('this: that'))
+        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', is_id=False))
+        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', is_id=False))
+        self.assertEqual('yes no', sanitize_filename('yes? no', is_id=False))
+        self.assertEqual('this - that', sanitize_filename('this: that', is_id=False))
 
         self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
         aumlaut = 'ä'
@@ -250,15 +264,6 @@
         self.assertEqual(sanitize_url('https://foo.bar'), 'https://foo.bar')
         self.assertEqual(sanitize_url('foo bar'), 'foo bar')
 
-    def test_extract_basic_auth(self):
-        auth_header = lambda url: sanitized_Request(url).get_header('Authorization')
-        self.assertFalse(auth_header('http://foo.bar'))
-        self.assertFalse(auth_header('http://:foo.bar'))
-        self.assertEqual(auth_header('http://@foo.bar'), 'Basic Og==')
-        self.assertEqual(auth_header('http://:pass@foo.bar'), 'Basic OnBhc3M=')
-        self.assertEqual(auth_header('http://user:@foo.bar'), 'Basic dXNlcjo=')
-        self.assertEqual(auth_header('http://user:pass@foo.bar'), 'Basic dXNlcjpwYXNz')
-
     def test_expand_path(self):
         def env(var):
             return f'%{var}%' if sys.platform == 'win32' else f'${var}'
@@ -367,6 +372,7 @@
         self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
         self.assertEqual(unified_strdate('1968 12 10'), '19681210')
         self.assertEqual(unified_strdate('1968-12-10'), '19681210')
+        self.assertEqual(unified_strdate('31-07-2022 20:00'), '20220731')
         self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
         self.assertEqual(
             unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
@@ -410,6 +416,10 @@
         self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
         self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)
 
+        self.assertEqual(unified_timestamp('December 31 1969 20:00:01 EDT'), 1)
+        self.assertEqual(unified_timestamp('Wednesday 31 December 1969 18:01:26 MDT'), 86)
+        self.assertEqual(unified_timestamp('12/31/1969 20:01:18 EDT', False), 78)
+
     def test_determine_ext(self):
         self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
         self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
@@ -559,6 +569,7 @@
         self.assertEqual(base_url('http://foo.de/bar/'), 'http://foo.de/bar/')
         self.assertEqual(base_url('http://foo.de/bar/baz'), 'http://foo.de/bar/')
         self.assertEqual(base_url('http://foo.de/bar/baz?x=z/x/c'), 'http://foo.de/bar/')
+        self.assertEqual(base_url('http://foo.de/bar/baz&x=z&w=y/x/c'), 'http://foo.de/bar/baz&x=z&w=y/x/')
 
     def test_urljoin(self):
         self.assertEqual(urljoin('http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
@@ -649,6 +660,8 @@
         self.assertEqual(parse_duration('P0Y0M0DT0H4M20.880S'), 260.88)
         self.assertEqual(parse_duration('01:02:03:050'), 3723.05)
         self.assertEqual(parse_duration('103:050'), 103.05)
+        self.assertEqual(parse_duration('1HR 3MIN'), 3780)
+        self.assertEqual(parse_duration('2hrs 3mins'), 7380)
 
     def test_fix_xml_ampersands(self):
         self.assertEqual(
@@ -895,7 +908,7 @@
             'dynamic_range': 'HDR10',
         })
         self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
-            'vcodec': 'av01.0.12M.10',
+            'vcodec': 'av01.0.12M.10.0.110.09.16.09.0',
            'acodec': 'none',
             'dynamic_range': 'HDR10',
         })
@@ -925,24 +938,124 @@
         self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
         self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
 
-    def test_escape_url(self):
+    def test_normalize_url(self):
         self.assertEqual(
-            escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
+            normalize_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
             'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
         )
         self.assertEqual(
-            escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
+            normalize_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
             'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
         )
         self.assertEqual(
-            escape_url('http://тест.рф/фрагмент'),
+            normalize_url('http://тест.рф/фрагмент'),
             'http://xn--e1aybc.xn--p1ai/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
         )
         self.assertEqual(
-            escape_url('http://тест.рф/абв?абв=абв#абв'),
+            normalize_url('http://тест.рф/абв?абв=абв#абв'),
             'http://xn--e1aybc.xn--p1ai/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
         )
-        self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
+        self.assertEqual(normalize_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
+
+        self.assertEqual(normalize_url('http://www.example.com/../a/b/../c/./d.html'), 'http://www.example.com/a/c/d.html')
+
+    def test_remove_dot_segments(self):
+        self.assertEqual(remove_dot_segments('/a/b/c/./../../g'), '/a/g')
+        self.assertEqual(remove_dot_segments('mid/content=5/../6'), 'mid/6')
+        self.assertEqual(remove_dot_segments('/ad/../cd'), '/cd')
+        self.assertEqual(remove_dot_segments('/ad/../cd/'), '/cd/')
+        self.assertEqual(remove_dot_segments('/..'), '/')
+        self.assertEqual(remove_dot_segments('/./'), '/')
+        self.assertEqual(remove_dot_segments('/./a'), '/a')
+        self.assertEqual(remove_dot_segments('/abc/./.././d/././e/.././f/./../../ghi'), '/ghi')
+        self.assertEqual(remove_dot_segments('/'), '/')
+        self.assertEqual(remove_dot_segments('/t'), '/t')
+        self.assertEqual(remove_dot_segments('t'), 't')
+        self.assertEqual(remove_dot_segments(''), '')
+        self.assertEqual(remove_dot_segments('/../a/b/c'), '/a/b/c')
+        self.assertEqual(remove_dot_segments('../a'), 'a')
+        self.assertEqual(remove_dot_segments('./a'), 'a')
+        self.assertEqual(remove_dot_segments('.'), '')
+        self.assertEqual(remove_dot_segments('////'), '////')
+
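For reference, `remove_dot_segments` mirrors the algorithm of RFC 3986 section 5.2.4. The sketch below is an illustrative transcription of that algorithm (a hypothetical helper, not yt-dlp's actual implementation) and satisfies the cases asserted above:

def rfc3986_remove_dot_segments(path: str) -> str:
    # Direct transcription of RFC 3986 section 5.2.4
    output = []  # completed path segments
    while path:
        if path.startswith('../'):
            path = path[3:]
        elif path.startswith('./'):
            path = path[2:]
        elif path.startswith('/./'):
            path = '/' + path[3:]
        elif path == '/.':
            path = '/'
        elif path.startswith('/../'):
            path = '/' + path[4:]
            if output:
                output.pop()
        elif path == '/..':
            path = '/'
            if output:
                output.pop()
        elif path in ('.', '..'):
            path = ''
        else:
            # move the first segment (up to, not including, the next '/') to output
            start = 1 if path.startswith('/') else 0
            end = path.find('/', start)
            if end == -1:
                end = len(path)
            output.append(path[:end])
            path = path[end:]
    return ''.join(output)

assert rfc3986_remove_dot_segments('/a/b/c/./../../g') == '/a/g'
assert rfc3986_remove_dot_segments('mid/content=5/../6') == 'mid/6'
assert rfc3986_remove_dot_segments('////') == '////'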
+    def test_js_to_json_vars_strings(self):
+        self.assertDictEqual(
+            json.loads(js_to_json(
+                '''{
+                    'null': a,
+                    'nullStr': b,
+                    'true': c,
+                    'trueStr': d,
+                    'false': e,
+                    'falseStr': f,
+                    'unresolvedVar': g,
+                }''',
+                {
+                    'a': 'null',
+                    'b': '"null"',
+                    'c': 'true',
+                    'd': '"true"',
+                    'e': 'false',
+                    'f': '"false"',
+                    'g': 'var',
+                }
+            )),
+            {
+                'null': None,
+                'nullStr': 'null',
+                'true': True,
+                'trueStr': 'true',
+                'false': False,
+                'falseStr': 'false',
+                'unresolvedVar': 'var'
+            }
+        )
+
+        self.assertDictEqual(
+            json.loads(js_to_json(
+                '''{
+                    'int': a,
+                    'intStr': b,
+                    'float': c,
+                    'floatStr': d,
+                }''',
+                {
+                    'a': '123',
+                    'b': '"123"',
+                    'c': '1.23',
+                    'd': '"1.23"',
+                }
+            )),
+            {
+                'int': 123,
+                'intStr': '123',
+                'float': 1.23,
+                'floatStr': '1.23',
+            }
+        )
+
+        self.assertDictEqual(
+            json.loads(js_to_json(
+                '''{
+                    'object': a,
+                    'objectStr': b,
+                    'array': c,
+                    'arrayStr': d,
+                }''',
+                {
+                    'a': '{}',
+                    'b': '"{}"',
+                    'c': '[]',
+                    'd': '"[]"',
+                }
+            )),
+            {
+                'object': {},
+                'objectStr': '{}',
+                'array': [],
+                'arrayStr': '[]',
+            }
+        )
+
     def test_js_to_json_realworld(self):
         inp = '''{
@@ -1090,10 +1203,29 @@
         on = js_to_json('[1,//{},\n2]')
         self.assertEqual(json.loads(on), [1, 2])
 
+        on = js_to_json(R'"\^\$\#"')
+        self.assertEqual(json.loads(on), R'^$#', msg='Unnecessary escapes should be stripped')
+
+        on = js_to_json('\'"\\""\'')
+        self.assertEqual(json.loads(on), '"""', msg='Unnecessary quote escape should be escaped')
+
     def test_js_to_json_malformed(self):
         self.assertEqual(js_to_json('42a1'), '42"a1"')
         self.assertEqual(js_to_json('42a-1'), '42"a"-1')
 
+    def test_js_to_json_template_literal(self):
+        self.assertEqual(js_to_json('`Hello ${name}`', {'name': '"world"'}), '"Hello world"')
+        self.assertEqual(js_to_json('`${name}${name}`', {'name': '"X"'}), '"XX"')
+        self.assertEqual(js_to_json('`${name}${name}`', {'name': '5'}), '"55"')
+        self.assertEqual(js_to_json('`${name}"${name}"`', {'name': '5'}), '"5\\"5\\""')
+        self.assertEqual(js_to_json('`${name}`', {}), '"name"')
+
+    def test_js_to_json_map_array_constructors(self):
+        self.assertEqual(json.loads(js_to_json('new Map([["a", 5]])')), {'a': 5})
+        self.assertEqual(json.loads(js_to_json('Array(5, 10)')), [5, 10])
+        self.assertEqual(json.loads(js_to_json('new Array(15,5)')), [15, 5])
+        self.assertEqual(json.loads(js_to_json('new Map([Array(5, 10),new Array(15,5)])')), {'5': 10, '15': 5})
+
     def test_extract_attributes(self):
         self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
         self.assertEqual(extract_attributes("<e x='y'>"), {'x': 'y'})
@@ -1669,6 +1801,9 @@ Line 1
         self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
         self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
 
+        self.assertEqual(list(get_elements_text_and_html_by_attribute(
+            'class', 'foo', '<a class="foo">nice</a><span class="foo">nice</span>', tag='a')), [('nice', '<a class="foo">nice</a>')])
+
     GET_ELEMENT_BY_TAG_TEST_STRING = '''
 random text lorem ipsum</p>
 <div>
@@ -1726,6 +1861,8 @@ Line 1
     def test_clean_podcast_url(self):
         self.assertEqual(clean_podcast_url('https://www.podtrac.com/pts/redirect.mp3/chtbl.com/track/5899E/traffic.megaphone.fm/HSW7835899191.mp3'), 'https://traffic.megaphone.fm/HSW7835899191.mp3')
         self.assertEqual(clean_podcast_url('https://play.podtrac.com/npr-344098539/edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3'), 'https://edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3')
+        self.assertEqual(clean_podcast_url('https://pdst.fm/e/2.gum.fm/chtbl.com/track/chrt.fm/track/34D33/pscrb.fm/rss/p/traffic.megaphone.fm/ITLLC7765286967.mp3?updated=1687282661'), 'https://traffic.megaphone.fm/ITLLC7765286967.mp3?updated=1687282661')
+        self.assertEqual(clean_podcast_url('https://pdst.fm/e/https://mgln.ai/e/441/www.buzzsprout.com/1121972/13019085-ep-252-the-deep-life-stack.mp3'), 'https://www.buzzsprout.com/1121972/13019085-ep-252-the-deep-life-stack.mp3')
 
     def test_LazyList(self):
         it = list(range(10))
@@ -1822,6 +1959,452 @@ Line 1
         with contextlib.suppress(OSError):
             os.remove(FILE)
 
+    def test_determine_file_encoding(self):
+        self.assertEqual(determine_file_encoding(b''), (None, 0))
+        self.assertEqual(determine_file_encoding(b'--verbose -x --audio-format mkv\n'), (None, 0))
+
+        self.assertEqual(determine_file_encoding(b'\xef\xbb\xbf'), ('utf-8', 3))
+        self.assertEqual(determine_file_encoding(b'\x00\x00\xfe\xff'), ('utf-32-be', 4))
+        self.assertEqual(determine_file_encoding(b'\xff\xfe'), ('utf-16-le', 2))
+
+        self.assertEqual(determine_file_encoding(b'\xff\xfe# coding: utf-8\n--verbose'), ('utf-16-le', 2))
+
+        self.assertEqual(determine_file_encoding(b'# coding: utf-8\n--verbose'), ('utf-8', 0))
+        self.assertEqual(determine_file_encoding(b'# coding: someencodinghere-12345\n--verbose'), ('someencodinghere-12345', 0))
+
+        self.assertEqual(determine_file_encoding(b'#coding:utf-8\n--verbose'), ('utf-8', 0))
+        self.assertEqual(determine_file_encoding(b'# coding: utf-8 \r\n--verbose'), ('utf-8', 0))
+
+        self.assertEqual(determine_file_encoding('# coding: utf-32-be'.encode('utf-32-be')), ('utf-32-be', 0))
+        self.assertEqual(determine_file_encoding('# coding: utf-16-le'.encode('utf-16-le')), ('utf-16-le', 0))
+
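The byte-order-mark cases above use the standard BOM prefixes. An illustrative BOM-only detector (`detect_bom` is a hypothetical name; it deliberately ignores the `# coding:` directive handling that `determine_file_encoding` also performs) could look like:

def detect_bom(data: bytes):
    # Returns (encoding, number of BOM bytes to skip); longer BOMs are checked
    # first, since UTF-32-LE's BOM (FF FE 00 00) begins with UTF-16-LE's (FF FE)
    for bom, encoding in (
        (b'\x00\x00\xfe\xff', 'utf-32-be'),
        (b'\xff\xfe\x00\x00', 'utf-32-le'),
        (b'\xff\xfe', 'utf-16-le'),
        (b'\xfe\xff', 'utf-16-be'),
        (b'\xef\xbb\xbf', 'utf-8'),
    ):
        if data.startswith(bom):
            return encoding, len(bom)
    return None, 0

assert detect_bom(b'\xef\xbb\xbf') == ('utf-8', 3)
assert detect_bom(b'\x00\x00\xfe\xff') == ('utf-32-be', 4)
assert detect_bom(b'--verbose\n') == (None, 0)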
+    def test_get_compatible_ext(self):
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None, None], vexts=['mp4'], aexts=['m4a', 'm4a']), 'mkv')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None], vexts=['flv'], aexts=['flv']), 'flv')
+
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['m4a']), 'mp4')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['webm']), 'mkv')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['m4a']), 'mkv')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['webm']), 'webm')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['weba']), 'webm')
+
+        self.assertEqual(get_compatible_ext(
+            vcodecs=['h264'], acodecs=['mp4a'], vexts=['mov'], aexts=['m4a']), 'mp4')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=['av01.0.12M.08'], acodecs=['opus'], vexts=['mp4'], aexts=['webm']), 'webm')
+
+        self.assertEqual(get_compatible_ext(
+            vcodecs=['vp9'], acodecs=['opus'], vexts=['webm'], aexts=['webm'], preferences=['flv', 'mp4']), 'mp4')
+        self.assertEqual(get_compatible_ext(
+            vcodecs=['av1'], acodecs=['mp4a'], vexts=['webm'], aexts=['m4a'], preferences=('webm', 'mkv')), 'mkv')
+
+    def test_try_call(self):
+        def total(*x, **kwargs):
+            return sum(x) + sum(kwargs.values())
+
+        self.assertEqual(try_call(None), None,
+                         msg='not a fn should give None')
+        self.assertEqual(try_call(lambda: 1), 1,
+                         msg='int fn with no expected_type should give int')
+        self.assertEqual(try_call(lambda: 1, expected_type=int), 1,
+                         msg='int fn with expected_type int should give int')
+        self.assertEqual(try_call(lambda: 1, expected_type=dict), None,
+                         msg='int fn with wrong expected_type should give None')
+        self.assertEqual(try_call(total, args=(0, 1, 0, ), expected_type=int), 1,
+                         msg='fn should accept arglist')
+        self.assertEqual(try_call(total, kwargs={'a': 0, 'b': 1, 'c': 0}, expected_type=int), 1,
+                         msg='fn should accept kwargs')
+        self.assertEqual(try_call(lambda: 1, expected_type=dict), None,
+                         msg='int fn with no expected_type should give None')
+        self.assertEqual(try_call(lambda x: {}, total, args=(42, ), expected_type=int), 42,
+                         msg='expect first int result with expected_type int')
+
+    def test_variadic(self):
+        self.assertEqual(variadic(None), (None, ))
+        self.assertEqual(variadic('spam'), ('spam', ))
+        self.assertEqual(variadic('spam', allowed_types=dict), 'spam')
+        with warnings.catch_warnings():
+            warnings.simplefilter('ignore')
+            self.assertEqual(variadic('spam', allowed_types=[dict]), 'spam')
+
+    def test_traverse_obj(self):
+        _TEST_DATA = {
+            100: 100,
+            1.2: 1.2,
+            'str': 'str',
+            'None': None,
+            '...': ...,
+            'urls': [
+                {'index': 0, 'url': 'https://www.example.com/0'},
+                {'index': 1, 'url': 'https://www.example.com/1'},
+            ],
+            'data': (
+                {'index': 2},
+                {'index': 3},
+            ),
+            'dict': {},
+        }
+
+        # Test base functionality
+        self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
+                         msg='allow tuple path')
+        self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
+                         msg='allow list path')
+        self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
+                         msg='allow iterable path')
+        self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
+                         msg='single items should be treated as a path')
+        self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
+        self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
+        self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
+
+        # Test Ellipsis behavior
+        self.assertCountEqual(traverse_obj(_TEST_DATA, ...),
+                              (item for item in _TEST_DATA.values() if item not in (None, {})),
+                              msg='`...` should give all non discarded values')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, ...)), _TEST_DATA['urls'][0].values(),
+                              msg='`...` selection for dicts should select all values')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., ..., 'url')),
+                         ['https://www.example.com/0', 'https://www.example.com/1'],
+                         msg='nested `...` queries should work')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, (..., ..., 'index')), range(4),
+                              msg='`...` query result should be flattened')
+        self.assertEqual(traverse_obj(iter(range(4)), ...), list(range(4)),
+                         msg='`...` should accept iterables')
+
+        # Test function as key
+        self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
+                         [_TEST_DATA['urls']],
+                         msg='function as query key should perform a filter based on (key, value)')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), {'str'},
+                              msg='exceptions in the query function should be caught')
+        self.assertEqual(traverse_obj(iter(range(4)), lambda _, x: x % 2 == 0), [0, 2],
+                         msg='function key should accept iterables')
+        if __debug__:
+            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
+                traverse_obj(_TEST_DATA, lambda a: ...)
+            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
+                traverse_obj(_TEST_DATA, lambda a, b, c: ...)
+
+        # Test set as key (transformation/type, like `expected_type`)
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper}, )), ['STR'],
+                         msg='Function in set should be a transformation')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str})), ['str'],
+                         msg='Type in set should be a type filter')
+        self.assertEqual(traverse_obj(_TEST_DATA, {dict}), _TEST_DATA,
+                         msg='A single set should be wrapped into a path')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper})), ['STR'],
+                         msg='Transformation function should not raise')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str_or_none})),
+                         [item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
+                         msg='Function in set should be a transformation')
+        if __debug__:
+            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
+                traverse_obj(_TEST_DATA, set())
+            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
+                traverse_obj(_TEST_DATA, {str.upper, str})
+
+        # Test `slice` as a key
+        _SLICE_DATA = [0, 1, 2, 3, 4]
+        self.assertEqual(traverse_obj(_TEST_DATA, ('dict', slice(1))), None,
+                         msg='slice on a dictionary should not throw')
+        self.assertEqual(traverse_obj(_SLICE_DATA, slice(1)), _SLICE_DATA[:1],
+                         msg='slice key should apply slice to sequence')
+        self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 2)), _SLICE_DATA[1:2],
+                         msg='slice key should apply slice to sequence')
+        self.assertEqual(traverse_obj(_SLICE_DATA, slice(1, 4, 2)), _SLICE_DATA[1:4:2],
+                         msg='slice key should apply slice to sequence')
+
+        # Test alternative paths
+        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
+                         msg='multiple `paths` should be treated as alternative paths')
+        self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
+                         msg='alternatives should exit early')
+        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
+                         msg='alternatives should return `default` if exhausted')
+        self.assertEqual(traverse_obj(_TEST_DATA, (..., 'fail'), 100), 100,
+                         msg='alternatives should track their own branching return')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)), list(_TEST_DATA['data']),
+                         msg='alternatives on empty objects should search further')
+
+        # Test branch and path nesting
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
+                         msg='tuple as key should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
+                         msg='list as key should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
+                         msg='double nesting in path should be treated as paths')
+        self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
+                         msg='do not fail early on branching')
+        self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
+                              ['https://www.example.com/0', 'https://www.example.com/1'],
+                              msg='triple nesting in path should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))),
+                         ['https://www.example.com/0', 'https://www.example.com/1'],
+                         msg='ellipsis as branch path start gets flattened')
+
+        # Test dictionary as key
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
+                         msg='dict key should result in a dict with the same keys')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
+                         {0: 'https://www.example.com/0'},
+                         msg='dict key should allow paths')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
+                         {0: ['https://www.example.com/0']},
+                         msg='tuple in dict path should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
+                         {0: ['https://www.example.com/0']},
+                         msg='double nesting in dict path should be treated as paths')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
+                         {0: ['https://www.example.com/1', 'https://www.example.com/0']},
+                         msg='triple nesting in dict path should be treated as branches')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
+                         msg='remove `None` values when top level dict key fails')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=...), {0: ...},
+                         msg='use `default` if key fails and `default`')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
+                         msg='remove empty values when dict key')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=...), {0: ...},
+                         msg='use `default` when dict key and `default`')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
+                         msg='remove empty values when nested dict key fails')
+        self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
+                         msg='default to dict if pruned')
+        self.assertEqual(traverse_obj(None, {0: 'fail'}, default=...), {0: ...},
+                         msg='default to dict if pruned and default is given')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=...), {0: {0: ...}},
+                         msg='use nested `default` when nested dict key fails and `default`')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', ...)}), {},
+                         msg='remove key if branch in dict key not successful')
+
+        # Testing default parameter behavior
+        _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
+                         msg='default value should be `None`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...), ...,
+                         msg='chained fails should result in default')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
+                         msg='should not short circuit on `None`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
+                         msg='invalid dict key should result in `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
+                         msg='`None` is a deliberate sentinel and should become `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
+                         msg='`IndexError` should result in `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1), 1,
+                         msg='if branched but not successful return `default` if defined, not `[]`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None), None,
+                         msg='if branched but not successful return `default` even if `default` is `None`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail')), [],
+                         msg='if branched but not successful return `[]`, not `default`')
+        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', ...)), [],
+                         msg='if branched but object is empty return `[]`, not `default`')
+        self.assertEqual(traverse_obj(None, ...), [],
+                         msg='if branched but object is `None` return `[]`, not `default`')
+        self.assertEqual(traverse_obj({0: None}, (0, ...)), [],
+                         msg='if branched but state is `None` return `[]`, not `default`')
+
+        branching_paths = [
+            ('fail', ...),
+            (..., 'fail'),
+            100 * ('fail',) + (...,),
+            (...,) + 100 * ('fail',),
+        ]
+        for branching_path in branching_paths:
+            self.assertEqual(traverse_obj({}, branching_path), [],
+                             msg='if branched but state is `None`, return `[]` (not `default`)')
+            self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
+                             msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
+            self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
+                             msg='if branching in last alternative and previous did match, return single value')
+            self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
+                             msg='if branching in first alternative and non-branching path does match, return single value')
+            self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
+                             msg='if branching in first alternative and non-branching path does not match, return `default`')
+
+        # Testing expected_type behavior
+        _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
+                         'str', msg='accept matching `expected_type` type')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
+                         None, msg='reject non matching `expected_type` type')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
+                         '0', msg='transform type using type function')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
+                         None, msg='wrap expected_type function in try_call')
+        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str),
+                         ['str'], msg='eliminate items that expected_type fails on')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
+                         {0: 100}, msg='type as expected_type should filter dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
+                         {0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, {int_or_none}), expected_type=int),
+                         1, msg='expected_type should not filter non final dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
+                         {0: {0: 100}}, msg='expected_type should transform deep dict values')
+        self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(...)),
+                         [{0: ...}, {0: ...}], msg='expected_type should transform branched dict values')
+        self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
+                         [4], msg='expected_type regression for type matching in tuple branching')
+        self.assertEqual(traverse_obj(_TEST_DATA, ['data', ...], expected_type=int),
+                         [], msg='expected_type regression for type matching in dict result')
+
+        # Test get_all behavior
+        _GET_ALL_DATA = {'key': [0, 1, 2]}
+        self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False), 0,
+                         msg='if not `get_all`, return only first matching value')
+        self.assertEqual(traverse_obj(_GET_ALL_DATA, ..., get_all=False), [0, 1, 2],
+                         msg='do not overflatten if not `get_all`')
+
+        # Test casesense behavior
+        _CASESENSE_DATA = {
+            'KeY': 'value0',
+            0: {
+                'KeY': 'value1',
+                0: {'KeY': 'value2'},
+            },
+        }
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
+                         msg='dict keys should be case sensitive unless `casesense`')
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
+                                      casesense=False), 'value0',
+                         msg='allow non matching key case if `casesense`')
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
+                                      casesense=False), ['value1'],
+                         msg='allow non matching key case in branch if `casesense`')
+        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
+                                      casesense=False), ['value2'],
+                         msg='allow non matching key case in branch path if `casesense`')
+
+        # Test traverse_string behavior
+        _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
+                         msg='do not traverse into string if not `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
+                                      traverse_string=True), 's',
+                         msg='traverse into string if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
+                                      traverse_string=True), '.',
+                         msg='traverse into converted data if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...),
+                                      traverse_string=True), 'str',
+                         msg='`...` should result in string (same value) if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
+                                      traverse_string=True), 'sr',
+                         msg='`slice` should result in string if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == "s"),
+                                      traverse_string=True), 'str',
+                         msg='function should result in string if `traverse_string`')
+        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
+                                      traverse_string=True), ['s', 'r'],
+                         msg='branching should result in list if `traverse_string`')
+        self.assertEqual(traverse_obj({}, (0, ...), traverse_string=True), [],
+                         msg='branching should result in list if `traverse_string`')
+        self.assertEqual(traverse_obj({}, (0, lambda x, y: True), traverse_string=True), [],
+                         msg='branching should result in list if `traverse_string`')
+        self.assertEqual(traverse_obj({}, (0, slice(1)), traverse_string=True), [],
+                         msg='branching should result in list if `traverse_string`')
+
+        # Test is_user_input behavior
+        _IS_USER_INPUT_DATA = {'range8': list(range(8))}
+        self.assertEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3'),
+                                      is_user_input=True), 3,
+                         msg='allow for string indexing if `is_user_input`')
+        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3:'),
+                                           is_user_input=True), tuple(range(8))[3:],
+                              msg='allow for string slice if `is_user_input`')
+        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':4:2'),
+                                           is_user_input=True), tuple(range(8))[:4:2],
+                              msg='allow step in string slice if `is_user_input`')
+        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':'),
+                                           is_user_input=True), range(8),
+                              msg='`:` should be treated as `...` if `is_user_input`')
+        with self.assertRaises(TypeError, msg='too many params should result in error'):
+            traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':::'), is_user_input=True)
+
+        # Test re.Match as input obj
+        mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
+        self.assertEqual(traverse_obj(mobj, ...), [x for x in mobj.groups() if x is not None],
+                         msg='`...` on a `re.Match` should give its `groups()`')
+        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
+                         msg='function on a `re.Match` should give groupno, value starting at 0')
+        self.assertEqual(traverse_obj(mobj, 'group'), '3',
+                         msg='str key on a `re.Match` should give group with that name')
+        self.assertEqual(traverse_obj(mobj, 2), '3',
+                         msg='int key on a `re.Match` should give group with that name')
+        self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
+                         msg='str key on a `re.Match` should respect casesense')
+        self.assertEqual(traverse_obj(mobj, 'fail'), None,
+                         msg='failing str key on a `re.Match` should return `default`')
+        self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
+                         msg='failing str key on a `re.Match` should return `default`')
+        self.assertEqual(traverse_obj(mobj, 8), None,
+                         msg='failing int key on a `re.Match` should return `default`')
+        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
+                         msg='function on a `re.Match` should give group name as well')
+
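Taken together, these assertions define the `traverse_obj` query mini-language: tuples and lists act as paths, `...` branches over collections, and failed lookups fall back to `default`. A compact usage example with made-up data, distilled from the cases above:

from yt_dlp.utils import traverse_obj

info = {'formats': [{'url': 'https://example.com/a', 'height': 720},
                    {'url': 'https://example.com/b'}]}
# branch over all formats and collect every available 'url'
assert traverse_obj(info, ('formats', ..., 'url')) == ['https://example.com/a', 'https://example.com/b']
# a failing index falls back to `default` instead of raising
assert traverse_obj(info, ('formats', 5, 'url'), default='n/a') == 'n/a'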
+    def test_http_header_dict(self):
+        headers = HTTPHeaderDict()
+        headers['ytdl-test'] = b'0'
+        self.assertEqual(list(headers.items()), [('Ytdl-Test', '0')])
+        headers['ytdl-test'] = 1
+        self.assertEqual(list(headers.items()), [('Ytdl-Test', '1')])
+        headers['Ytdl-test'] = '2'
+        self.assertEqual(list(headers.items()), [('Ytdl-Test', '2')])
+        self.assertTrue('ytDl-Test' in headers)
+        self.assertEqual(str(headers), str(dict(headers)))
+        self.assertEqual(repr(headers), str(dict(headers)))
+
+        headers.update({'X-dlp': 'data'})
+        self.assertEqual(set(headers.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data')})
+        self.assertEqual(dict(headers), {'Ytdl-Test': '2', 'X-Dlp': 'data'})
+        self.assertEqual(len(headers), 2)
+        self.assertEqual(headers.copy(), headers)
+        headers2 = HTTPHeaderDict({'X-dlp': 'data3'}, **headers, **{'X-dlp': 'data2'})
+        self.assertEqual(set(headers2.items()), {('Ytdl-Test', '2'), ('X-Dlp', 'data2')})
+        self.assertEqual(len(headers2), 2)
+        headers2.clear()
+        self.assertEqual(len(headers2), 0)
+
+        # ensure we prefer latter headers
+        headers3 = HTTPHeaderDict({'Ytdl-TeSt': 1}, {'Ytdl-test': 2})
+        self.assertEqual(set(headers3.items()), {('Ytdl-Test', '2')})
+        del headers3['ytdl-tesT']
+        self.assertEqual(dict(headers3), {})
+
+        headers4 = HTTPHeaderDict({'ytdl-test': 'data;'})
+        self.assertEqual(set(headers4.items()), {('Ytdl-Test', 'data;')})
+
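The behaviour pinned down here, case-insensitive access with names stored title-cased and values coerced to `str`, can be sketched with a small `dict` subclass (an illustration only, not the `yt_dlp.utils.networking` implementation; note that plain `dict.update` would bypass these overrides):

class CaseFoldingHeaders(dict):
    @staticmethod
    def _key(name):
        # normalize header names, e.g. 'ytDl-Test' -> 'Ytdl-Test'
        return name.title() if isinstance(name, str) else name

    def __setitem__(self, name, value):
        if isinstance(value, bytes):
            value = value.decode('latin-1')
        super().__setitem__(self._key(name), str(value))

    def __getitem__(self, name):
        return super().__getitem__(self._key(name))

    def __delitem__(self, name):
        super().__delitem__(self._key(name))

    def __contains__(self, name):
        return super().__contains__(self._key(name))

h = CaseFoldingHeaders()
h['ytdl-test'] = b'0'
assert list(h.items()) == [('Ytdl-Test', '0')]
assert 'ytDl-Test' in h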
+    def test_extract_basic_auth(self):
+        assert extract_basic_auth('http://:foo.bar') == ('http://:foo.bar', None)
+        assert extract_basic_auth('http://foo.bar') == ('http://foo.bar', None)
+        assert extract_basic_auth('http://@foo.bar') == ('http://foo.bar', 'Basic Og==')
+        assert extract_basic_auth('http://:pass@foo.bar') == ('http://foo.bar', 'Basic OnBhc3M=')
+        assert extract_basic_auth('http://user:@foo.bar') == ('http://foo.bar', 'Basic dXNlcjo=')
+        assert extract_basic_auth('http://user:pass@foo.bar') == ('http://foo.bar', 'Basic dXNlcjpwYXNz')
+
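A rough sketch of that splitting with the standard library (`split_basic_auth` is a hypothetical name; this is an illustration, not the library's code):

import base64
from urllib.parse import urlsplit, urlunsplit

def split_basic_auth(url):
    parts = urlsplit(url)
    if parts.username is None:  # no userinfo component present
        return url, None
    token = base64.b64encode(f'{parts.username}:{parts.password or ""}'.encode()).decode()
    # rebuild the URL without the userinfo portion
    netloc = parts.hostname + (f':{parts.port}' if parts.port else '')
    return urlunsplit(parts._replace(netloc=netloc)), f'Basic {token}'

assert split_basic_auth('http://user:pass@foo.bar') == ('http://foo.bar', 'Basic dXNlcjpwYXNz')
assert split_basic_auth('http://foo.bar') == ('http://foo.bar', None)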
+    @unittest.skipUnless(compat_os_name == 'nt', 'Only relevant on Windows')
+    def test_Popen_windows_escaping(self):
+        def run_shell(args):
+            stdout, stderr, error = Popen.run(
+                args, text=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            assert not stderr
+            assert not error
+            return stdout
+
+        # Test escaping
+        assert run_shell(['echo', 'test"&']) == '"test""&"\n'
+        # Test if delayed expansion is disabled
+        assert run_shell(['echo', '^!']) == '"^!"\n'
+        assert run_shell('echo "^!"') == '"^!"\n'
+
 
 if __name__ == '__main__':
     unittest.main()
@@ -10,6 +10,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 from test.helper import FakeYDL, is_download_test
 from yt_dlp.extractor import YoutubeIE, YoutubeTabIE
+from yt_dlp.utils import ExtractorError
 
 
 @is_download_test
@@ -53,6 +54,18 @@ class TestYoutubeLists(unittest.TestCase):
         self.assertEqual(video['duration'], 10)
         self.assertEqual(video['uploader'], 'Philipp Hagemeister')
 
+    def test_youtube_channel_no_uploads(self):
+        dl = FakeYDL()
+        dl.params['extract_flat'] = True
+        ie = YoutubeTabIE(dl)
+        # no uploads
+        with self.assertRaisesRegex(ExtractorError, r'no uploads'):
+            ie.extract('https://www.youtube.com/channel/UC2yXPzFejc422buOIzn_0CA')
+
+        # no uploads and no UCID given
+        with self.assertRaisesRegex(ExtractorError, r'no uploads'):
+            ie.extract('https://www.youtube.com/news')
+
 
 if __name__ == '__main__':
     unittest.main()
@@ -62,10 +62,19 @@ _SIG_TESTS = [
         'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflKjOTVq/html5player.js',
         '312AA52209E3623129A412D56A40F11CB0AF14AE.3EE09501CB14E3BCDC3B2AE808BF3F1D14E7FBF12',
         '112AA5220913623229A412D56A40F11CB0AF14AE.3EE0950FCB14EEBCDC3B2AE808BF331D14E7FBF3',
-    )
+    ),
+    (
+        'https://www.youtube.com/s/player/6ed0d907/player_ias.vflset/en_US/base.js',
+        '2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA',
+        'AOq0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL2QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0',
+    ),
 ]
 
 _NSIG_TESTS = [
+    (
+        'https://www.youtube.com/s/player/7862ca1f/player_ias.vflset/en_US/base.js',
+        'X_LCxVDjAavgE5t', 'yxJ1dM6iz5ogUg',
+    ),
     (
         'https://www.youtube.com/s/player/9216d1f7/player_ias.vflset/en_US/base.js',
         'SLp9F5bwjAdhE9F-', 'gWnb9IK2DJ8Q1w',
@@ -94,6 +103,66 @@ _NSIG_TESTS = [
         'https://www.youtube.com/s/player/5dd88d1d/player-plasma-ias-phone-en_US.vflset/base.js',
         'kSxKFLeqzv_ZyHSAt', 'n8gS8oRlHOxPFA',
     ),
+    (
+        'https://www.youtube.com/s/player/324f67b9/player_ias.vflset/en_US/base.js',
+        'xdftNy7dh9QGnhW', '22qLGxrmX8F1rA',
+    ),
+    (
+        'https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js',
+        'TDCstCG66tEAO5pR9o', 'dbxNtZ14c-yWyw',
+    ),
+    (
+        'https://www.youtube.com/s/player/c81bbb4a/player_ias.vflset/en_US/base.js',
+        'gre3EcLurNY2vqp94', 'Z9DfGxWP115WTg',
+    ),
+    (
+        'https://www.youtube.com/s/player/1f7d5369/player_ias.vflset/en_US/base.js',
+        'batNX7sYqIJdkJ', 'IhOkL_zxbkOZBw',
+    ),
+    (
+        'https://www.youtube.com/s/player/009f1d77/player_ias.vflset/en_US/base.js',
+        '5dwFHw8aFWQUQtffRq', 'audescmLUzI3jw',
+    ),
+    (
+        'https://www.youtube.com/s/player/dc0c6770/player_ias.vflset/en_US/base.js',
+        '5EHDMgYLV6HPGk_Mu-kk', 'n9lUJLHbxUI0GQ',
+    ),
+    (
+        'https://www.youtube.com/s/player/113ca41c/player_ias.vflset/en_US/base.js',
+        'cgYl-tlYkhjT7A', 'hI7BBr2zUgcmMg',
+    ),
+    (
+        'https://www.youtube.com/s/player/c57c113c/player_ias.vflset/en_US/base.js',
+        'M92UUMHa8PdvPd3wyM', '3hPqLJsiNZx7yA',
+    ),
+    (
+        'https://www.youtube.com/s/player/5a3b6271/player_ias.vflset/en_US/base.js',
+        'B2j7f_UPT4rfje85Lu_e', 'm5DmNymaGQ5RdQ',
+    ),
+    (
+        'https://www.youtube.com/s/player/7a062b77/player_ias.vflset/en_US/base.js',
+        'NRcE3y3mVtm_cV-W', 'VbsCYUATvqlt5w',
+    ),
+    (
+        'https://www.youtube.com/s/player/dac945fd/player_ias.vflset/en_US/base.js',
+        'o8BkRxXhuYsBCWi6RplPdP', '3Lx32v_hmzTm6A',
+    ),
+    (
+        'https://www.youtube.com/s/player/6f20102c/player_ias.vflset/en_US/base.js',
+        'lE8DhoDmKqnmJJ', 'pJTTX6XyJP2BYw',
+    ),
+    (
+        'https://www.youtube.com/s/player/cfa9e7cb/player_ias.vflset/en_US/base.js',
+        'aCi3iElgd2kq0bxVbQ', 'QX1y8jGb2IbZ0w',
+    ),
+    (
+        'https://www.youtube.com/s/player/8c7583ff/player_ias.vflset/en_US/base.js',
+        '1wWCVpRR96eAmMI87L', 'KSkWAVv1ZQxC3A',
+    ),
+    (
+        'https://www.youtube.com/s/player/b7910ca8/player_ias.vflset/en_US/base.js',
+        '_hXMCwMt9qE310D', 'LoZMgkkofRMCZQ',
+    ),
 ]
 
 
@@ -101,6 +170,7 @@ _NSIG_TESTS = [
 class TestPlayerInfo(unittest.TestCase):
     def test_youtube_extract_player_info(self):
         PLAYER_URLS = (
+            ('https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js', '4c3f79c5'),
             ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/en_US/base.js', '64dddad9'),
             ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/fr_FR/base.js', '64dddad9'),
             ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-phone-en_US.vflset/base.js', '64dddad9'),
@@ -169,7 +239,7 @@ def n_sig(jscode, sig_input):
 
 
 make_sig_test = t_factory(
-    'signature', signature, re.compile(r'.*-(?P<id>[a-zA-Z0-9_-]+)(?:/watch_as3|/html5player)?\.[a-z]+$'))
+    'signature', signature, re.compile(r'.*(?:-|/player/)(?P<id>[a-zA-Z0-9_-]+)(?:/.+\.js|(?:/watch_as3|/html5player)?\.[a-z]+)$'))
 for test_spec in _SIG_TESTS:
     make_sig_test(*test_spec)
|
make_sig_test(*test_spec)
|
||||||
|
|
||||||
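Note: the widened player-ID regex above accepts both the legacy html5player URL layout and the current /s/player/<id>/ layout. A quick standalone check (not part of the test suite; the regex is copied verbatim from the hunk, the URLs are taken from the test tables):

    import re

    PLAYER_ID_RE = re.compile(
        r'.*(?:-|/player/)(?P<id>[a-zA-Z0-9_-]+)(?:/.+\.js|(?:/watch_as3|/html5player)?\.[a-z]+)$')

    print(PLAYER_ID_RE.match(
        'https://s.ytimg.com/yts/jsbin/html5player-en_US-vflKjOTVq/html5player.js').group('id'))  # vflKjOTVq
    print(PLAYER_ID_RE.match(
        'https://www.youtube.com/s/player/6ed0d907/player_ias.vflset/en_US/base.js').group('id'))  # 6ed0d907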
test/testdata/ism/ec-3_test.Manifest (vendored, new file, 1 line)
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="utf-8"?><!--Transformed by VSMT using XSL stylesheet for rule Identity--><!-- Created with Unified Streaming Platform (version=1.10.12-18737) --><SmoothStreamingMedia MajorVersion="2" MinorVersion="0" TimeScale="10000000" Duration="370000000"><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="127802" CodecPrivateData="1190" SamplingRate="48000" Channels="2" BitsPerSample="16" PacketSize="4" AudioTag="255" FourCC="AACL" /><c t="0" d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="7253333" /></StreamIndex><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu_1" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu_1={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="224000" CodecPrivateData="00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00" FourCCData="0700200F00" SamplingRate="48000" Channels="6" BitsPerSample="16" PacketSize="896" AudioTag="65534" FourCC="EC-3" /><c t="0" d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="8320000" /></StreamIndex><StreamIndex Type="video" QualityLevels="8" TimeScale="10000000" Language="deu" Name="video_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(video_deu={start time})?noStreamProfile=1" MaxWidth="1920" MaxHeight="1080" DisplayWidth="1920" DisplayHeight="1080"><QualityLevel Index="0" Bitrate="23909" CodecPrivateData="000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8" MaxWidth="384" MaxHeight="216" FourCC="AVC1" /><QualityLevel Index="1" Bitrate="403188" CodecPrivateData="00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2" MaxWidth="400" MaxHeight="224" FourCC="AVC1" /><QualityLevel Index="2" Bitrate="680365" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="3" Bitrate="1253465" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="4" Bitrate="2121558" CodecPrivateData="00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80" MaxWidth="768" MaxHeight="432" FourCC="AVC1" /><QualityLevel Index="5" Bitrate="3275545" CodecPrivateData="00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80" MaxWidth="1280" MaxHeight="720" FourCC="AVC1" /><QualityLevel Index="6" Bitrate="5300196" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><QualityLevel Index="7" Bitrate="8079312" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><c t="0" d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="10000000" /></StreamIndex></SmoothStreamingMedia>
test/testdata/yt_dlp_plugins/extractor/_ignore.py (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class IgnorePluginIE(InfoExtractor):
+    pass
test/testdata/yt_dlp_plugins/extractor/ignore.py (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class IgnoreNotInAllPluginIE(InfoExtractor):
+    pass
+
+
+class InAllPluginIE(InfoExtractor):
+    pass
+
+
+__all__ = ['InAllPluginIE']
test/testdata/yt_dlp_plugins/extractor/normal.py (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class NormalPluginIE(InfoExtractor):
+    pass
+
+
+class _IgnoreUnderscorePluginIE(InfoExtractor):
+    pass
test/testdata/yt_dlp_plugins/postprocessor/normal.py (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
+from yt_dlp.postprocessor.common import PostProcessor
+
+
+class NormalPluginPP(PostProcessor):
+    pass
test/testdata/zipped_plugins/yt_dlp_plugins/extractor/zipped.py (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
+from yt_dlp.extractor.common import InfoExtractor
+
+
+class ZippedPluginIE(InfoExtractor):
+    pass
test/testdata/zipped_plugins/yt_dlp_plugins/postprocessor/zipped.py (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
+from yt_dlp.postprocessor.common import PostProcessor
+
+
+class ZippedPluginPP(PostProcessor):
+    pass
yt_dlp/YoutubeDL.py (1611 lines changed): file diff suppressed because it is too large.
yt_dlp/__init__.py
@@ -1,24 +1,30 @@
-f'You are using an unsupported version of Python. Only Python versions 3.6 and above are supported by yt-dlp'  # noqa: F541
+try:
+    import contextvars  # noqa: F401
+except Exception:
+    raise Exception(
+        f'You are using an unsupported version of Python. Only Python versions 3.7 and above are supported by yt-dlp')  # noqa: F541
 
 __license__ = 'Public Domain'
 
+import collections
 import getpass
 import itertools
 import optparse
 import os
 import re
 import sys
+import traceback
 
 from .compat import compat_shlex_quote
 from .cookies import SUPPORTED_BROWSERS, SUPPORTED_KEYRINGS
-from .downloader import FileDownloader
 from .downloader.external import get_external_downloader
 from .extractor import list_extractor_classes
 from .extractor.adobepass import MSO_INFO
-from .extractor.common import InfoExtractor
 from .options import parseOpts
 from .postprocessor import (
     FFmpegExtractAudioPP,
+    FFmpegMergerPP,
+    FFmpegPostProcessor,
     FFmpegSubtitlesConvertorPP,
     FFmpegThumbnailsConvertorPP,
     FFmpegVideoConvertorPP,
@@ -33,6 +39,7 @@ from .utils import (
     DateRange,
     DownloadCancelled,
     DownloadError,
+    FormatSorter,
     GeoUtils,
     PlaylistEntries,
     SameFileError,
@@ -43,19 +50,22 @@ from .utils import (
     format_field,
     int_or_none,
     match_filter_func,
+    parse_bytes,
     parse_duration,
     preferredencoding,
     read_batch_urls,
     read_stdin,
     render_table,
     setproctitle,
-    std_headers,
     traverse_obj,
     variadic,
     write_string,
 )
+from .utils.networking import std_headers
 from .YoutubeDL import YoutubeDL
 
+_IN_CLI = False
+
 
 def _exit(status=0, *args):
     for msg in args:
@@ -82,12 +92,11 @@ def get_urls(urls, batchfile, verbose):
 
 
 def print_extractor_information(opts, urls):
-    # Importing GenericIE is currently slow since it imports other extractors
-    # TODO: Move this back to module level after generalization of embed detection
-    from .extractor.generic import GenericIE
-
     out = ''
     if opts.list_extractors:
+        # Importing GenericIE is currently slow since it imports YoutubeIE
+        from .extractor.generic import GenericIE
+
         urls = dict.fromkeys(urls, False)
         for ie in list_extractor_classes(opts.age_limit):
             out += ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie.working() else '') + '\n'
@@ -143,7 +152,7 @@ def set_compat_opts(opts):
     else:
         opts.embed_infojson = False
     if 'format-sort' in opts.compat_opts:
-        opts.format_sort.extend(InfoExtractor.FormatSort.ytdl_default)
+        opts.format_sort.extend(FormatSorter.ytdl_default)
     _video_multistreams_set = set_default_compat('multistreams', 'allow_multiple_video_streams', False, remove_compat=False)
     _audio_multistreams_set = set_default_compat('multistreams', 'allow_multiple_audio_streams', False, remove_compat=False)
    if _video_multistreams_set is False and _audio_multistreams_set is False:
@@ -179,8 +188,8 @@ def validate_options(opts):
             raise ValueError(f'{max_name} "{max_val}" must be must be greater than or equal to {min_name} "{min_val}"')
 
     # Usernames and passwords
-    validate(not opts.usenetrc or (opts.username is None and opts.password is None),
-             '.netrc', msg='using {name} conflicts with giving username/password')
+    validate(sum(map(bool, (opts.usenetrc, opts.netrc_cmd, opts.username))) <= 1, '.netrc',
+             msg='{name}, netrc command and username/password are mutually exclusive options')
     validate(opts.password is None or opts.username is not None, 'account username', msg='{name} missing')
     validate(opts.ap_password is None or opts.ap_username is not None,
              'TV Provider account username', msg='{name} missing')
@@ -218,9 +227,11 @@ def validate_options(opts):
 
     # Format sort
     for f in opts.format_sort:
-        validate_regex('format sorting', f, InfoExtractor.FormatSort.regex)
+        validate_regex('format sorting', f, FormatSorter.regex)
 
     # Postprocessor formats
+    validate_regex('merge output format', opts.merge_output_format,
+                   r'({0})(/({0}))*'.format('|'.join(map(re.escape, FFmpegMergerPP.SUPPORTED_EXTS))))
     validate_regex('audio format', opts.audioformat, FFmpegExtractAudioPP.FORMAT_RE)
     validate_in('subtitle format', opts.convertsubtitles, FFmpegSubtitlesConvertorPP.SUPPORTED_EXTS)
     validate_regex('thumbnail format', opts.convertthumbnails, FFmpegThumbnailsConvertorPP.FORMAT_RE)
@@ -270,19 +281,19 @@ def validate_options(opts):
             raise ValueError(f'invalid {key} retry sleep expression {expr!r}')
 
     # Bytes
-    def parse_bytes(name, value):
+    def validate_bytes(name, value):
         if value is None:
             return None
-        numeric_limit = FileDownloader.parse_bytes(value)
+        numeric_limit = parse_bytes(value)
         validate(numeric_limit is not None, 'rate limit', value)
         return numeric_limit
 
-    opts.ratelimit = parse_bytes('rate limit', opts.ratelimit)
-    opts.throttledratelimit = parse_bytes('throttled rate limit', opts.throttledratelimit)
-    opts.min_filesize = parse_bytes('min filesize', opts.min_filesize)
-    opts.max_filesize = parse_bytes('max filesize', opts.max_filesize)
-    opts.buffersize = parse_bytes('buffer size', opts.buffersize)
-    opts.http_chunk_size = parse_bytes('http chunk size', opts.http_chunk_size)
+    opts.ratelimit = validate_bytes('rate limit', opts.ratelimit)
+    opts.throttledratelimit = validate_bytes('throttled rate limit', opts.throttledratelimit)
+    opts.min_filesize = validate_bytes('min filesize', opts.min_filesize)
+    opts.max_filesize = validate_bytes('max filesize', opts.max_filesize)
+    opts.buffersize = validate_bytes('buffer size', opts.buffersize)
+    opts.http_chunk_size = validate_bytes('http chunk size', opts.http_chunk_size)
 
     # Output templates
     def validate_outtmpl(tmpl, msg):
@@ -308,37 +319,63 @@ def validate_options(opts):
     if outtmpl_default == '':
         opts.skip_download = None
         del opts.outtmpl['default']
-    if outtmpl_default and not os.path.splitext(outtmpl_default)[1] and opts.extractaudio:
-        raise ValueError(
-            'Cannot download a video and extract audio into the same file! '
-            f'Use "{outtmpl_default}.%(ext)s" instead of "{outtmpl_default}" as the output template')
 
-    def parse_chapters(name, value):
-        chapters, ranges = [], []
-        for regex in value or []:
-            if regex.startswith('*'):
-                for range in regex[1:].split(','):
-                    dur = tuple(map(parse_duration, range.strip().split('-')))
-                    if len(dur) == 2 and all(t is not None for t in dur):
-                        ranges.append(dur)
-                    else:
-                        raise ValueError(f'invalid {name} time range "{regex}". Must be of the form *start-end')
-                continue
-            try:
-                chapters.append(re.compile(regex))
-            except re.error as err:
-                raise ValueError(f'invalid {name} regex "{regex}" - {err}')
-        return chapters, ranges
+    def parse_chapters(name, value, advanced=False):
+        parse_timestamp = lambda x: float('inf') if x in ('inf', 'infinite') else parse_duration(x)
+        TIMESTAMP_RE = r'''(?x)(?:
+            (?P<start_sign>-?)(?P<start>[^-]+)
+        )?\s*-\s*(?:
+            (?P<end_sign>-?)(?P<end>[^-]+)
+        )?'''
+
+        chapters, ranges, from_url = [], [], False
+        for regex in value or []:
+            if advanced and regex == '*from-url':
+                from_url = True
+                continue
+            elif not regex.startswith('*'):
+                try:
+                    chapters.append(re.compile(regex))
+                except re.error as err:
+                    raise ValueError(f'invalid {name} regex "{regex}" - {err}')
+                continue
+
+            for range_ in map(str.strip, regex[1:].split(',')):
+                mobj = range_ != '-' and re.fullmatch(TIMESTAMP_RE, range_)
+                dur = mobj and [parse_timestamp(mobj.group('start') or '0'), parse_timestamp(mobj.group('end') or 'inf')]
+                signs = mobj and (mobj.group('start_sign'), mobj.group('end_sign'))
+
+                err = None
+                if None in (dur or [None]):
+                    err = 'Must be of the form "*start-end"'
+                elif not advanced and any(signs):
+                    err = 'Negative timestamps are not allowed'
+                else:
+                    dur[0] *= -1 if signs[0] else 1
+                    dur[1] *= -1 if signs[1] else 1
+                    if dur[1] == float('-inf'):
+                        err = '"-inf" is not a valid end'
+                if err:
+                    raise ValueError(f'invalid {name} time range "{regex}". {err}')
+                ranges.append(dur)
+
+        return chapters, ranges, from_url
 
-    opts.remove_chapters, opts.remove_ranges = parse_chapters('--remove-chapters', opts.remove_chapters)
-    opts.download_ranges = download_range_func(*parse_chapters('--download-sections', opts.download_ranges))
+    opts.remove_chapters, opts.remove_ranges, _ = parse_chapters('--remove-chapters', opts.remove_chapters)
+    opts.download_ranges = download_range_func(*parse_chapters('--download-sections', opts.download_ranges, True))
 
     # Cookies from browser
     if opts.cookiesfrombrowser:
-        mobj = re.match(r'(?P<name>[^+:]+)(\s*\+\s*(?P<keyring>[^:]+))?(\s*:(?P<profile>.+))?', opts.cookiesfrombrowser)
+        container = None
+        mobj = re.fullmatch(r'''(?x)
+            (?P<name>[^+:]+)
+            (?:\s*\+\s*(?P<keyring>[^:]+))?
+            (?:\s*:\s*(?!:)(?P<profile>.+?))?
+            (?:\s*::\s*(?P<container>.+))?
+        ''', opts.cookiesfrombrowser)
         if mobj is None:
             raise ValueError(f'invalid cookies from browser arguments: {opts.cookiesfrombrowser}')
-        browser_name, keyring, profile = mobj.group('name', 'keyring', 'profile')
+        browser_name, keyring, profile, container = mobj.group('name', 'keyring', 'profile', 'container')
         browser_name = browser_name.lower()
         if browser_name not in SUPPORTED_BROWSERS:
             raise ValueError(f'unsupported browser specified for cookies: "{browser_name}". '
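Note: a minimal sketch of the timestamp grammar the rewritten parse_chapters() accepts for --download-sections / --remove-chapters (TIMESTAMP_RE is copied from the hunk above; the input value is illustrative):

    import re
    from yt_dlp.utils import parse_duration

    TIMESTAMP_RE = r'''(?x)(?:
        (?P<start_sign>-?)(?P<start>[^-]+)
    )?\s*-\s*(?:
        (?P<end_sign>-?)(?P<end>[^-]+)
    )?'''
    parse_timestamp = lambda x: float('inf') if x in ('inf', 'infinite') else parse_duration(x)

    # '*10:15-inf' selects from 10:15 to the end; in advanced mode the sign
    # groups also allow negative (from-the-end) timestamps such as '*-30--10'
    mobj = re.fullmatch(TIMESTAMP_RE, '10:15-inf')
    print(parse_timestamp(mobj.group('start')), parse_timestamp(mobj.group('end')))  # 615.0 inf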
@@ -348,7 +385,7 @@ def validate_options(opts):
         if keyring not in SUPPORTED_KEYRINGS:
             raise ValueError(f'unsupported keyring specified for cookies: "{keyring}". '
                              f'Supported keyrings are: {", ".join(sorted(SUPPORTED_KEYRINGS))}')
-        opts.cookiesfrombrowser = (browser_name, profile, keyring)
+        opts.cookiesfrombrowser = (browser_name, profile, keyring, container)
 
     # MetadataParser
     def metadataparser_actions(f):
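Note: a standalone demo of the extended --cookies-from-browser grammar (regex copied from the hunk above; the browser, keyring, profile and container values are illustrative):

    import re

    BROWSER_SPEC_RE = r'''(?x)
        (?P<name>[^+:]+)
        (?:\s*\+\s*(?P<keyring>[^:]+))?
        (?:\s*:\s*(?!:)(?P<profile>.+?))?
        (?:\s*::\s*(?P<container>.+))?
    '''
    for spec in ('brave', 'chrome+gnomekeyring:Profile 1', 'firefox::Meta'):
        print(re.fullmatch(BROWSER_SPEC_RE, spec).groupdict())
    # the last spec selects a Firefox container named "Meta" in the default profile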
@@ -369,10 +406,12 @@ def validate_options(opts):
             raise ValueError(f'{cmd} is invalid; {err}')
         yield action
 
-    parse_metadata = opts.parse_metadata or []
     if opts.metafromtitle is not None:
-        parse_metadata.append('title:%s' % opts.metafromtitle)
-    opts.parse_metadata = list(itertools.chain(*map(metadataparser_actions, parse_metadata)))
+        opts.parse_metadata.setdefault('pre_process', []).append('title:%s' % opts.metafromtitle)
+    opts.parse_metadata = {
+        k: list(itertools.chain(*map(metadataparser_actions, v)))
+        for k, v in opts.parse_metadata.items()
+    }
 
     # Other options
     if opts.playlist_items is not None:
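Note: with opts.parse_metadata now a dict keyed by the postprocessing stage, get_postprocessors() (further down in this diff) yields one MetadataParser per stage. A rough sketch of the resulting mapping, with hypothetical option values:

    opts_parse_metadata = {  # hypothetical parsed value of repeated --parse-metadata options
        'pre_process': ['title:%(artist)s - %(track)s'],
        'post_process': ['description:(?P<summary>.+)'],
    }
    for when, actions in opts_parse_metadata.items():
        print({'key': 'MetadataParser', 'actions': actions, 'when': when})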
@@ -381,18 +420,26 @@ def validate_options(opts):
         except Exception as err:
             raise ValueError(f'Invalid playlist-items {opts.playlist_items!r}: {err}')
 
-    geo_bypass_code = opts.geo_bypass_ip_block or opts.geo_bypass_country
-    if geo_bypass_code is not None:
+    opts.geo_bypass_country, opts.geo_bypass_ip_block = None, None
+    if opts.geo_bypass.lower() not in ('default', 'never'):
         try:
-            GeoUtils.random_ipv4(geo_bypass_code)
+            GeoUtils.random_ipv4(opts.geo_bypass)
         except Exception:
-            raise ValueError('unsupported geo-bypass country or ip-block')
+            raise ValueError(f'Unsupported --xff "{opts.geo_bypass}"')
+        if len(opts.geo_bypass) == 2:
+            opts.geo_bypass_country = opts.geo_bypass
+        else:
+            opts.geo_bypass_ip_block = opts.geo_bypass
+    opts.geo_bypass = opts.geo_bypass.lower() != 'never'
 
-    opts.match_filter = match_filter_func(opts.match_filter)
+    opts.match_filter = match_filter_func(opts.match_filter, opts.breaking_match_filter)
 
     if opts.download_archive is not None:
         opts.download_archive = expand_path(opts.download_archive)
 
+    if opts.ffmpeg_location is not None:
+        opts.ffmpeg_location = expand_path(opts.ffmpeg_location)
+
     if opts.user_agent is not None:
         opts.headers.setdefault('User-Agent', opts.user_agent)
     if opts.referer is not None:
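Note: the reworked validation feeds both forms of --xff through GeoUtils.random_ipv4, which accepts either a two-letter country code or a CIDR block; a quick check (the printed addresses are whatever the RNG picks):

    from yt_dlp.utils import GeoUtils

    print(GeoUtils.random_ipv4('US'))                # random address from the US pool
    print(GeoUtils.random_ipv4('198.51.100.0/24'))   # random address inside the block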
@@ -412,6 +459,10 @@ def validate_options(opts):
     elif ed and proto == 'default':
         default_downloader = ed.get_basename()
 
+    for policy in opts.color.values():
+        if policy not in ('always', 'auto', 'no_color', 'never'):
+            raise ValueError(f'"{policy}" is not a valid color policy')
+
     warnings, deprecation_warnings = [], []
 
     # Common mistake: -f best
@@ -468,7 +519,7 @@ def validate_options(opts):
                     val1=opts.sponskrub and opts.sponskrub_cut)
 
     # Conflicts with --allow-unplayable-formats
-    report_conflict('--add-metadata', 'addmetadata')
+    report_conflict('--embed-metadata', 'addmetadata')
     report_conflict('--embed-chapters', 'addchapters')
     report_conflict('--embed-info-json', 'embed_infojson')
     report_conflict('--embed-subs', 'embedsubtitles')
@@ -516,7 +567,7 @@ def validate_options(opts):
         # Do not unnecessarily download audio
         opts.format = 'bestaudio/best'
 
-    if opts.getcomments and opts.writeinfojson is None:
+    if opts.getcomments and opts.writeinfojson is None and not opts.embed_infojson:
         # If JSON is not printed anywhere, but comments are requested, save it to file
         if not opts.dumpjson or opts.print_json or opts.dump_single_json:
             opts.writeinfojson = True
@@ -541,11 +592,11 @@ def validate_options(opts):
 def get_postprocessors(opts):
     yield from opts.add_postprocessors
 
-    if opts.parse_metadata:
+    for when, actions in opts.parse_metadata.items():
         yield {
             'key': 'MetadataParser',
-            'actions': opts.parse_metadata,
-            'when': 'pre_process'
+            'actions': actions,
+            'when': when
         }
     sponsorblock_query = opts.sponsorblock_mark | opts.sponsorblock_remove
     if sponsorblock_query:
@@ -665,8 +716,11 @@ def get_postprocessors(opts):
     }
 
 
+ParsedOptions = collections.namedtuple('ParsedOptions', ('parser', 'options', 'urls', 'ydl_opts'))
+
+
 def parse_options(argv=None):
-    """ @returns (parser, opts, urls, ydl_opts) """
+    """@returns ParsedOptions(parser, opts, urls, ydl_opts)"""
     parser, opts, urls = parseOpts(argv)
     urls = get_urls(urls, opts.batchfile, opts.verbose)
 
@@ -678,11 +732,28 @@ def parse_options(argv=None):
 
     postprocessors = list(get_postprocessors(opts))
 
-    print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[2:])
+    print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[3:])
     any_getting = any(getattr(opts, k) for k in (
         'dumpjson', 'dump_single_json', 'getdescription', 'getduration', 'getfilename',
         'getformat', 'getid', 'getthumbnail', 'gettitle', 'geturl'
     ))
+    if opts.quiet is None:
+        opts.quiet = any_getting or opts.print_json or bool(opts.forceprint)
+
+    playlist_pps = [pp for pp in postprocessors if pp.get('when') == 'playlist']
+    write_playlist_infojson = (opts.writeinfojson and not opts.clean_infojson
+                               and opts.allow_playlist_files and opts.outtmpl.get('pl_infojson') != '')
+    if not any((
+        opts.extract_flat,
+        opts.dump_single_json,
+        opts.forceprint.get('playlist'),
+        opts.print_to_file.get('playlist'),
+        write_playlist_infojson,
+    )):
+        if not playlist_pps:
+            opts.extract_flat = 'discard'
+        elif playlist_pps == [{'key': 'FFmpegConcat', 'only_multi_video': True, 'when': 'playlist'}]:
+            opts.extract_flat = 'discard_in_playlist'
+
     final_ext = (
         opts.recodevideo if opts.recodevideo in FFmpegVideoConvertorPP.SUPPORTED_EXTS
@@ -690,9 +761,10 @@ def parse_options(argv=None):
         else opts.audioformat if (opts.extractaudio and opts.audioformat in FFmpegExtractAudioPP.SUPPORTED_EXTS)
         else None)
 
-    return parser, opts, urls, {
+    return ParsedOptions(parser, opts, urls, {
         'usenetrc': opts.usenetrc,
         'netrc_location': opts.netrc_location,
+        'netrc_cmd': opts.netrc_cmd,
         'username': opts.username,
         'password': opts.password,
         'twofactor': opts.twofactor,
@@ -703,7 +775,7 @@ def parse_options(argv=None):
         'client_certificate': opts.client_certificate,
         'client_certificate_key': opts.client_certificate_key,
         'client_certificate_password': opts.client_certificate_password,
-        'quiet': opts.quiet or any_getting or opts.print_json or bool(opts.forceprint),
+        'quiet': opts.quiet,
         'no_warnings': opts.no_warnings,
         'forceurl': opts.geturl,
         'forcetitle': opts.gettitle,
@@ -739,6 +811,7 @@ def parse_options(argv=None):
         'windowsfilenames': opts.windowsfilenames,
         'ignoreerrors': opts.ignoreerrors,
         'force_generic_extractor': opts.force_generic_extractor,
+        'allowed_extractors': opts.allowed_extractors or ['default'],
         'ratelimit': opts.ratelimit,
         'throttledratelimit': opts.throttledratelimit,
         'overwrites': opts.overwrites,
@@ -814,6 +887,7 @@ def parse_options(argv=None):
         'legacyserverconnect': opts.legacy_server_connect,
         'nocheckcertificate': opts.no_check_certificate,
         'prefer_insecure': opts.prefer_insecure,
+        'enable_file_urls': opts.enable_file_urls,
         'http_headers': opts.headers,
         'proxy': opts.proxy,
         'socket_timeout': opts.socket_timeout,
@@ -848,7 +922,7 @@ def parse_options(argv=None):
         'playlist_items': opts.playlist_items,
         'xattr_set_filesize': opts.xattr_set_filesize,
         'match_filter': opts.match_filter,
-        'no_color': opts.no_color,
+        'color': opts.color,
         'ffmpeg_location': opts.ffmpeg_location,
         'hls_prefer_native': opts.hls_prefer_native,
         'hls_use_mpegts': opts.hls_use_mpegts,
@@ -863,7 +937,7 @@ def parse_options(argv=None):
         '_warnings': warnings,
         '_deprecation_warnings': deprecation_warnings,
         'compat_opts': opts.compat_opts,
-    }
+    })
 
 
 def _real_main(argv=None):
@@ -880,6 +954,11 @@ def _real_main(argv=None):
     if print_extractor_information(opts, all_urls):
         return
 
+    # We may need ffmpeg_location without having access to the YoutubeDL instance
+    # See https://github.com/yt-dlp/yt-dlp/issues/2191
+    if opts.ffmpeg_location:
+        FFmpegPostProcessor._ffmpeg_location.set(opts.ffmpeg_location)
+
     with YoutubeDL(ydl_opts) as ydl:
         pre_process = opts.update_self or opts.rm_cachedir
         actual_use = all_urls or opts.load_info_filename
@@ -887,14 +966,18 @@ def _real_main(argv=None):
         if opts.rm_cachedir:
             ydl.cache.remove()
 
-        updater = Updater(ydl)
-        if opts.update_self and updater.update() and actual_use:
-            if updater.cmd:
-                return updater.restart()
-            # This code is reachable only for zip variant in py < 3.10
-            # It makes sense to exit here, but the old behavior is to continue
-            ydl.report_warning('Restart yt-dlp to use the updated version')
-            # return 100, 'ERROR: The program must exit for the update to complete'
+        try:
+            updater = Updater(ydl, opts.update_self)
+            if opts.update_self and updater.update() and actual_use:
+                if updater.cmd:
+                    return updater.restart()
+                # This code is reachable only for zip variant in py < 3.10
+                # It makes sense to exit here, but the old behavior is to continue
+                ydl.report_warning('Restart yt-dlp to use the updated version')
+                # return 100, 'ERROR: The program must exit for the update to complete'
+        except Exception:
+            traceback.print_exc()
+            ydl._download_retcode = 100
 
         if not actual_use:
             if pre_process:
@@ -908,6 +991,8 @@ def _real_main(argv=None):
     parser.destroy()
     try:
         if opts.load_info_filename is not None:
+            if all_urls:
+                ydl.report_warning('URLs are ignored due to --load-info-json')
             return ydl.download_with_info_file(expand_path(opts.load_info_filename))
         else:
             return ydl.download(all_urls)
@@ -917,6 +1002,8 @@ def _real_main(argv=None):
 
 
 def main(argv=None):
+    global _IN_CLI
+    _IN_CLI = True
     try:
         _exit(*variadic(_real_main(argv)))
     except DownloadError:
yt_dlp/__main__.py
@@ -5,7 +5,7 @@
 
 import sys
 
-if __package__ is None and not hasattr(sys, 'frozen'):
+if __package__ is None and not getattr(sys, 'frozen', False):
     # direct call of __main__.py
     import os.path
     path = os.path.realpath(os.path.abspath(__file__))
yt_dlp/__pyinstaller/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+import os
+
+
+def get_hook_dirs():
+    return [os.path.dirname(__file__)]
yt_dlp/__pyinstaller/hook-yt_dlp.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+import sys
+
+from PyInstaller.utils.hooks import collect_submodules
+
+
+def pycryptodome_module():
+    try:
+        import Cryptodome  # noqa: F401
+    except ImportError:
+        try:
+            import Crypto  # noqa: F401
+            print('WARNING: Using Crypto since Cryptodome is not available. '
+                  'Install with: pip install pycryptodomex', file=sys.stderr)
+            return 'Crypto'
+        except ImportError:
+            pass
+    return 'Cryptodome'
+
+
+def get_hidden_imports():
+    yield from ('yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated')
+    yield from ('yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated')
+    yield pycryptodome_module()
+    yield from collect_submodules('websockets')
+    # These are auto-detected, but explicitly add them just in case
+    yield from ('mutagen', 'brotli', 'certifi')
+
+
+hiddenimports = list(get_hidden_imports())
+print(f'Adding imports: {hiddenimports}')
+
+excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts']
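Note: a short sanity check of the new hook package, assuming yt-dlp and PyInstaller are installed. PyInstaller discovers the hook directory through get_hook_dirs(), which is typically registered as a 'pyinstaller40' entry point in the packaging metadata; the hook module itself only needs to expose the module-level hiddenimports/excludedimports lists:

    from yt_dlp.__pyinstaller import get_hook_dirs

    print(get_hook_dirs())  # e.g. ['.../site-packages/yt_dlp/__pyinstaller']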
yt_dlp/aes.py
@@ -2,17 +2,17 @@ import base64
 from math import ceil
 
 from .compat import compat_ord
-from .dependencies import Cryptodome_AES
+from .dependencies import Cryptodome
 from .utils import bytes_to_intlist, intlist_to_bytes
 
-if Cryptodome_AES:
+if Cryptodome.AES:
     def aes_cbc_decrypt_bytes(data, key, iv):
         """ Decrypt bytes with AES-CBC using pycryptodome """
-        return Cryptodome_AES.new(key, Cryptodome_AES.MODE_CBC, iv).decrypt(data)
+        return Cryptodome.AES.new(key, Cryptodome.AES.MODE_CBC, iv).decrypt(data)
 
     def aes_gcm_decrypt_and_verify_bytes(data, key, tag, nonce):
         """ Decrypt bytes with AES-GCM using pycryptodome """
-        return Cryptodome_AES.new(key, Cryptodome_AES.MODE_GCM, nonce).decrypt_and_verify(data, tag)
+        return Cryptodome.AES.new(key, Cryptodome.AES.MODE_GCM, nonce).decrypt_and_verify(data, tag)
 
 else:
     def aes_cbc_decrypt_bytes(data, key, iv):
@@ -24,16 +24,59 @@ else:
         return intlist_to_bytes(aes_gcm_decrypt_and_verify(*map(bytes_to_intlist, (data, key, tag, nonce))))
 
 
-def unpad_pkcs7(data):
-    return data[:-compat_ord(data[-1])]
+def aes_cbc_encrypt_bytes(data, key, iv, **kwargs):
+    return intlist_to_bytes(aes_cbc_encrypt(*map(bytes_to_intlist, (data, key, iv)), **kwargs))
 
 
 BLOCK_SIZE_BYTES = 16
 
 
+def unpad_pkcs7(data):
+    return data[:-compat_ord(data[-1])]
+
+
+def pkcs7_padding(data):
+    """
+    PKCS#7 padding
+
+    @param {int[]} data cleartext
+    @returns {int[]} padding data
+    """
+
+    remaining_length = BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES
+    return data + [remaining_length] * remaining_length
+
+
+def pad_block(block, padding_mode):
+    """
+    Pad a block with the given padding mode
+    @param {int[]} block block to pad
+    @param padding_mode padding mode
+    """
+    padding_size = BLOCK_SIZE_BYTES - len(block)
+
+    PADDING_BYTE = {
+        'pkcs7': padding_size,
+        'iso7816': 0x0,
+        'whitespace': 0x20,
+        'zero': 0x0,
+    }
+
+    if padding_size < 0:
+        raise ValueError('Block size exceeded')
+    elif padding_mode not in PADDING_BYTE:
+        raise NotImplementedError(f'Padding mode {padding_mode} is not implemented')
+
+    if padding_mode == 'iso7816' and padding_size:
+        block = block + [0x80]  # NB: += mutates list
+        padding_size -= 1
+
+    return block + [PADDING_BYTE[padding_mode]] * padding_size
+
+
 def aes_ecb_encrypt(data, key, iv=None):
     """
-    Encrypt with aes in ECB mode
+    Encrypt with aes in ECB mode. Using PKCS#7 padding
 
     @param {int[]} data cleartext
     @param {int[]} key 16/24/32-Byte cipher key
@@ -46,8 +89,7 @@ def aes_ecb_encrypt(data, key, iv=None):
     encrypted_data = []
     for i in range(block_count):
         block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
-        encrypted_data += aes_encrypt(block, expanded_key)
-    encrypted_data = encrypted_data[:len(data)]
+        encrypted_data += aes_encrypt(pkcs7_padding(block), expanded_key)
 
     return encrypted_data
 
@@ -137,13 +179,14 @@ def aes_cbc_decrypt(data, key, iv):
     return decrypted_data
 
 
-def aes_cbc_encrypt(data, key, iv):
+def aes_cbc_encrypt(data, key, iv, *, padding_mode='pkcs7'):
     """
-    Encrypt with aes in CBC mode. Using PKCS#7 padding
+    Encrypt with aes in CBC mode
 
     @param {int[]} data cleartext
     @param {int[]} key 16/24/32-Byte cipher key
     @param {int[]} iv 16-Byte IV
+    @param padding_mode Padding mode to use
     @returns {int[]} encrypted data
     """
     expanded_key = key_expansion(key)
@@ -153,8 +196,8 @@ def aes_cbc_encrypt(data, key, iv):
     previous_cipher_block = iv
     for i in range(block_count):
         block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
-        remaining_length = BLOCK_SIZE_BYTES - len(block)
-        block += [remaining_length] * remaining_length
+        block = pad_block(block, padding_mode)
        mixed_block = xor(block, previous_cipher_block)
 
         encrypted_block = aes_encrypt(mixed_block, expanded_key)
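Note: a minimal sketch of the padding helpers introduced above (the key and IV below are illustrative test values, not anything from the repo):

    from yt_dlp.aes import aes_cbc_encrypt_bytes, pad_block

    # PKCS#7: the padding byte equals the number of bytes added
    print(pad_block([1, 2, 3, 4, 5], 'pkcs7'))    # [1, 2, 3, 4, 5, 11, 11, ..., 11]
    # ISO 7816-4: a 0x80 marker followed by zero bytes
    print(pad_block([1, 2, 3], 'iso7816'))        # [1, 2, 3, 128, 0, ..., 0]

    key, iv = b'0123456789abcdef', bytes(16)
    ciphertext = aes_cbc_encrypt_bytes(b'secret message', key, iv, padding_mode='whitespace')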
@@ -502,13 +545,23 @@ def ghash(subkey, data):
 
 
 __all__ = [
-    'aes_ctr_decrypt',
     'aes_cbc_decrypt',
     'aes_cbc_decrypt_bytes',
+    'aes_ctr_decrypt',
     'aes_decrypt_text',
-    'aes_encrypt',
+    'aes_decrypt',
+    'aes_ecb_decrypt',
     'aes_gcm_decrypt_and_verify',
     'aes_gcm_decrypt_and_verify_bytes',
+
+    'aes_cbc_encrypt',
+    'aes_cbc_encrypt_bytes',
+    'aes_ctr_encrypt',
+    'aes_ecb_encrypt',
+    'aes_encrypt',
+
     'key_expansion',
+    'pad_block',
+    'pkcs7_padding',
     'unpad_pkcs7',
 ]
yt_dlp/cache.py
@@ -1,12 +1,13 @@
 import contextlib
-import errno
 import json
 import os
 import re
 import shutil
 import traceback
+import urllib.parse
 
-from .utils import expand_path, write_json_file
+from .utils import expand_path, traverse_obj, version_tuple, write_json_file
+from .version import __version__
 
 
 class Cache:
@@ -21,11 +22,9 @@ class Cache:
         return expand_path(res)
 
     def _get_cache_fn(self, section, key, dtype):
-        assert re.match(r'^[a-zA-Z0-9_.-]+$', section), \
-            'invalid section %r' % section
-        assert re.match(r'^[a-zA-Z0-9_.-]+$', key), 'invalid key %r' % key
-        return os.path.join(
-            self._get_root_dir(), section, f'{key}.{dtype}')
+        assert re.match(r'^[\w.-]+$', section), f'invalid section {section!r}'
+        key = urllib.parse.quote(key, safe='').replace('%', ',')  # encode non-ascii characters
+        return os.path.join(self._get_root_dir(), section, f'{key}.{dtype}')
 
     @property
     def enabled(self):
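Note: the quote-then-replace step above keeps cache filenames filesystem-safe while still being reversible; a quick demo of the same expression (the key value is illustrative):

    import urllib.parse

    key = 'player/ü'
    print(urllib.parse.quote(key, safe='').replace('%', ','))  # player,2F,C3,BC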
@@ -39,18 +38,22 @@ class Cache:
 
         fn = self._get_cache_fn(section, key, dtype)
         try:
-            try:
-                os.makedirs(os.path.dirname(fn))
-            except OSError as ose:
-                if ose.errno != errno.EEXIST:
-                    raise
+            os.makedirs(os.path.dirname(fn), exist_ok=True)
             self._ydl.write_debug(f'Saving {section}.{key} to cache')
-            write_json_file(data, fn)
+            write_json_file({'yt-dlp_version': __version__, 'data': data}, fn)
         except Exception:
             tb = traceback.format_exc()
             self._ydl.report_warning(f'Writing cache to {fn!r} failed: {tb}')
 
-    def load(self, section, key, dtype='json', default=None):
+    def _validate(self, data, min_ver):
+        version = traverse_obj(data, 'yt-dlp_version')
+        if not version:  # Backward compatibility
+            data, version = {'data': data}, '2022.08.19'
+        if not min_ver or version_tuple(version) >= version_tuple(min_ver):
+            return data['data']
+        self._ydl.write_debug(f'Discarding old cache from version {version} (needs {min_ver})')
+
+    def load(self, section, key, dtype='json', default=None, *, min_ver=None):
         assert dtype in ('json',)
 
         if not self.enabled:
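Note: a sketch of the resulting round-trip (the section, key and cachedir below are illustrative). store() now wraps the payload in a {'yt-dlp_version': ..., 'data': ...} envelope, and load() discards entries written before min_ver:

    from yt_dlp import YoutubeDL

    with YoutubeDL({'cachedir': '/tmp/ydl-cache'}) as ydl:
        ydl.cache.store('examples', 'demo-key', {'answer': 42})
        # Returns {'answer': 42}; would return None (the default) if the entry
        # had been written by a yt-dlp version older than min_ver
        print(ydl.cache.load('examples', 'demo-key', min_ver='2022.09.01'))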
@@ -61,8 +64,8 @@ class Cache:
         try:
             with open(cache_fn, encoding='utf-8') as cachef:
                 self._ydl.write_debug(f'Loading {section}.{key} from cache')
-                return json.load(cachef)
-        except ValueError:
+                return self._validate(json.load(cachef), min_ver)
+        except (ValueError, KeyError):
             try:
                 file_size = os.path.getsize(cache_fn)
             except OSError as oe:
yt_dlp/casefold.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+import warnings
+
+warnings.warn(DeprecationWarning(f'{__name__} is deprecated'))
+
+casefold = str.casefold
yt_dlp/compat/__init__.py
@@ -1,27 +1,17 @@
 import os
 import sys
-import warnings
 import xml.etree.ElementTree as etree
 
-from . import re
-from ._deprecated import *  # noqa: F401, F403
 from .compat_utils import passthrough_module
 
-# XXX: Implement this the same way as other DeprecationWarnings without circular import
-try:
-    passthrough_module(__name__, '._legacy', callback=lambda attr: warnings.warn(
-        DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=2))
-    HAS_LEGACY = True
-except ModuleNotFoundError:
-    # Keep working even without _legacy module
-    HAS_LEGACY = False
+passthrough_module(__name__, '._deprecated')
 del passthrough_module
 
 
 # HTMLParseError has been deprecated in Python 3.3 and removed in
 # Python 3.5. Introducing dummy exception for Python >3.5 for compatible
 # and uniform cross-version exception handling
-class compat_HTMLParseError(Exception):
+class compat_HTMLParseError(ValueError):
     pass
 
 
@@ -39,7 +29,8 @@ compat_os_name = os._name if os.name == 'java' else os.name
 
 if compat_os_name == 'nt':
     def compat_shlex_quote(s):
-        return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"')
+        import re
+        return s if re.match(r'^[-_\w./]+$', s) else s.replace('"', '""').join('""')
 else:
     from shlex import quote as compat_shlex_quote  # noqa: F401
 
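Note: the Windows branch now escapes embedded double quotes by doubling them (cmd.exe style) instead of backslash-escaping. A standalone demo of the new expression, copied from the hunk above:

    import re

    def compat_shlex_quote(s):  # Windows branch only
        return s if re.match(r'^[-_\w./]+$', s) else s.replace('"', '""').join('""')

    print(compat_shlex_quote('safe/path.txt'))  # safe/path.txt
    print(compat_shlex_quote('say "hi"'))       # "say ""hi"""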
@@ -76,3 +67,13 @@ if compat_os_name in ('nt', 'ce'):
         return userhome + path[i:]
 else:
     compat_expanduser = os.path.expanduser
+
+
+def urllib_req_to_req(urllib_request):
+    """Convert urllib Request to a networking Request"""
+    from ..networking import Request
+    from ..utils.networking import HTTPHeaderDict
+    return Request(
+        urllib_request.get_full_url(), data=urllib_request.data, method=urllib_request.get_method(),
+        headers=HTTPHeaderDict(urllib_request.headers, urllib_request.unredirected_hdrs),
+        extensions={'timeout': urllib_request.timeout} if hasattr(urllib_request, 'timeout') else None)
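Note: a usage sketch for the new helper (the URL and header are illustrative):

    import urllib.request

    from yt_dlp.compat import urllib_req_to_req

    old_req = urllib.request.Request('https://example.com', headers={'X-Test': '1'}, method='HEAD')
    req = urllib_req_to_req(old_req)
    print(req.method, req.headers)  # HEAD plus the converted headers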
yt_dlp/compat/_deprecated.py
@@ -1,4 +1,12 @@
 """Deprecated - New code should avoid these"""
+import warnings
+
+from .compat_utils import passthrough_module
+
+# XXX: Implement this the same way as other DeprecationWarnings without circular import
+passthrough_module(__name__, '.._legacy', callback=lambda attr: warnings.warn(
+    DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=6))
+del passthrough_module
 
 import base64
 import urllib.error
@@ -8,7 +16,6 @@ compat_str = str
 
 compat_b64decode = base64.b64decode
 
-compat_HTTPError = urllib.error.HTTPError
 compat_urlparse = urllib.parse
 compat_parse_qs = urllib.parse.parse_qs
 compat_urllib_parse_unquote = urllib.parse.unquote
yt_dlp/compat/_legacy.py
@@ -1,5 +1,6 @@
 """ Do not use! """

+import base64
 import collections
 import ctypes
 import getpass
@@ -15,20 +16,25 @@ import shlex
 import shutil
 import socket
 import struct
+import subprocess
 import tokenize
 import urllib.error
 import urllib.parse
 import urllib.request
 import xml.etree.ElementTree as etree
-from subprocess import DEVNULL

-from .compat_utils import passthrough_module  # isort: split
-from .asyncio import run as compat_asyncio_run  # noqa: F401
-from .re import Pattern as compat_Pattern  # noqa: F401
-from .re import match as compat_Match  # noqa: F401
-from ..dependencies import Cryptodome_AES as compat_pycrypto_AES  # noqa: F401
+# isort: split
+import asyncio  # noqa: F401
+import re  # noqa: F401
+from asyncio import run as compat_asyncio_run  # noqa: F401
+from re import Pattern as compat_Pattern  # noqa: F401
+from re import match as compat_Match  # noqa: F401
+
+from . import compat_expanduser, compat_HTMLParseError, compat_realpath
+from .compat_utils import passthrough_module
 from ..dependencies import brotli as compat_brotli  # noqa: F401
 from ..dependencies import websockets as compat_websockets  # noqa: F401
+from ..dependencies.Cryptodome import AES as compat_pycrypto_AES  # noqa: F401

 passthrough_module(__name__, '...utils', ('WINDOWS_VT_MODE', 'windows_enable_vt_mode'))
@@ -43,40 +49,48 @@ def compat_setenv(key, value, env=os.environ):
     env[key] = value


+compat_base64_b64decode = base64.b64decode
 compat_basestring = str
+compat_casefold = str.casefold
 compat_chr = chr
 compat_collections_abc = collections.abc
-compat_cookiejar = http.cookiejar
-compat_cookiejar_Cookie = http.cookiejar.Cookie
-compat_cookies = http.cookies
-compat_cookies_SimpleCookie = http.cookies.SimpleCookie
-compat_etree_Element = etree.Element
-compat_etree_register_namespace = etree.register_namespace
+compat_cookiejar = compat_http_cookiejar = http.cookiejar
+compat_cookiejar_Cookie = compat_http_cookiejar_Cookie = http.cookiejar.Cookie
+compat_cookies = compat_http_cookies = http.cookies
+compat_cookies_SimpleCookie = compat_http_cookies_SimpleCookie = http.cookies.SimpleCookie
+compat_etree_Element = compat_xml_etree_ElementTree_Element = etree.Element
+compat_etree_register_namespace = compat_xml_etree_register_namespace = etree.register_namespace
 compat_filter = filter
 compat_get_terminal_size = shutil.get_terminal_size
 compat_getenv = os.getenv
-compat_getpass = getpass.getpass
+compat_getpass = compat_getpass_getpass = getpass.getpass
 compat_html_entities = html.entities
 compat_html_entities_html5 = html.entities.html5
-compat_HTMLParser = html.parser.HTMLParser
+compat_html_parser_HTMLParseError = compat_HTMLParseError
+compat_HTMLParser = compat_html_parser_HTMLParser = html.parser.HTMLParser
 compat_http_client = http.client
 compat_http_server = http.server
+compat_HTTPError = urllib.error.HTTPError
 compat_input = input
 compat_integer_types = (int, )
 compat_itertools_count = itertools.count
 compat_kwargs = lambda kwargs: kwargs
 compat_map = map
 compat_numeric_types = (int, float, complex)
+compat_os_path_expanduser = compat_expanduser
+compat_os_path_realpath = compat_realpath
 compat_print = print
 compat_shlex_split = shlex.split
 compat_socket_create_connection = socket.create_connection
 compat_Struct = struct.Struct
 compat_struct_pack = struct.pack
 compat_struct_unpack = struct.unpack
-compat_subprocess_get_DEVNULL = lambda: DEVNULL
+compat_subprocess_get_DEVNULL = lambda: subprocess.DEVNULL
 compat_tokenize_tokenize = tokenize.tokenize
 compat_urllib_error = urllib.error
+compat_urllib_HTTPError = urllib.error.HTTPError
 compat_urllib_parse = urllib.parse
+compat_urllib_parse_parse_qs = urllib.parse.parse_qs
 compat_urllib_parse_quote = urllib.parse.quote
 compat_urllib_parse_quote_plus = urllib.parse.quote_plus
 compat_urllib_parse_unquote_plus = urllib.parse.unquote_plus
@@ -85,8 +99,10 @@ compat_urllib_parse_urlunparse = urllib.parse.urlunparse
 compat_urllib_request = urllib.request
 compat_urllib_request_DataHandler = urllib.request.DataHandler
 compat_urllib_response = urllib.response
-compat_urlretrieve = urllib.request.urlretrieve
-compat_xml_parse_error = etree.ParseError
+compat_urlretrieve = compat_urllib_request_urlretrieve = urllib.request.urlretrieve
+compat_xml_parse_error = compat_xml_etree_ElementTree_ParseError = etree.ParseError
 compat_xpath = lambda xpath: xpath
 compat_zip = zip
 workaround_optparse_bug9161 = lambda: None
+
+legacy = []
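Each chained assignment keeps the short historic alias and the fully-qualified name bound to the same object, so either import path keeps working:

from yt_dlp.compat._legacy import compat_cookiejar, compat_http_cookiejar

assert compat_cookiejar is compat_http_cookiejar  # both are http.cookiejar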
yt_dlp/compat/asyncio.py (deleted)
@@ -1,23 +0,0 @@
-# flake8: noqa: F405
-from asyncio import *  # noqa: F403
-
-from .compat_utils import passthrough_module
-
-passthrough_module(__name__, 'asyncio')
-del passthrough_module
-
-try:
-    run  # >= 3.7
-except NameError:
-    def run(coro):
-        try:
-            loop = get_event_loop()
-        except RuntimeError:
-            loop = new_event_loop()
-            set_event_loop(loop)
-        loop.run_until_complete(coro)
-
-try:
-    all_tasks  # >= 3.7
-except NameError:
-    all_tasks = Task.all_tasks
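This shim only mattered on interpreters predating the "# >= 3.7" markers above; on the Python versions yt-dlp now supports, the stdlib names are used directly:

import asyncio

async def main():
    return 42

assert asyncio.run(main()) == 42  # no fallback event-loop plumbing needed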
yt_dlp/compat/compat_utils.py
@@ -1,5 +1,6 @@
 import collections
 import contextlib
+import functools
 import importlib
 import sys
 import types
@@ -10,61 +11,73 @@ _Package = collections.namedtuple('Package', ('name', 'version'))


 def get_package_info(module):
-    parent = module.__name__.split('.')[0]
-    parent_module = None
-    with contextlib.suppress(ImportError):
-        parent_module = importlib.import_module(parent)
-
-    for attr in ('__version__', 'version_string', 'version'):
-        version = getattr(parent_module, attr, None)
-        if version is not None:
-            break
-    return _Package(getattr(module, '_yt_dlp__identifier', parent), str(version))
+    return _Package(
+        name=getattr(module, '_yt_dlp__identifier', module.__name__),
+        version=str(next(filter(None, (
+            getattr(module, attr, None)
+            for attr in ('_yt_dlp__version', '__version__', 'version_string', 'version')
+        )), None)))


 def _is_package(module):
-    try:
-        module.__getattribute__('__path__')
-    except AttributeError:
-        return False
-    return True
+    return '__path__' in vars(module)


-def passthrough_module(parent, child, allowed_attributes=None, *, callback=lambda _: None):
-    parent_module = importlib.import_module(parent)
-    child_module = None  # Import child module only as needed
-
-    class PassthroughModule(types.ModuleType):
-        def __getattr__(self, attr):
-            if _is_package(parent_module):
-                with contextlib.suppress(ImportError):
-                    return importlib.import_module(f'.{attr}', parent)
-
-            ret = self.__from_child(attr)
-            if ret is _NO_ATTRIBUTE:
-                raise AttributeError(f'module {parent} has no attribute {attr}')
-            callback(attr)
-            return ret
-
-        def __from_child(self, attr):
-            if allowed_attributes is None:
-                if attr.startswith('__') and attr.endswith('__'):
-                    return _NO_ATTRIBUTE
-            elif attr not in allowed_attributes:
-                return _NO_ATTRIBUTE
-
-            nonlocal child_module
-            child_module = child_module or importlib.import_module(child, parent)
-
-            with contextlib.suppress(AttributeError):
-                return getattr(child_module, attr)
-
-            if _is_package(child_module):
-                with contextlib.suppress(ImportError):
-                    return importlib.import_module(f'.{attr}', child)
-
-            return _NO_ATTRIBUTE
-
-    # Python 3.6 does not have module level __getattr__
-    # https://peps.python.org/pep-0562/
-    sys.modules[parent].__class__ = PassthroughModule
+def _is_dunder(name):
+    return name.startswith('__') and name.endswith('__')
+
+
+class EnhancedModule(types.ModuleType):
+    def __bool__(self):
+        return vars(self).get('__bool__', lambda: True)()
+
+    def __getattribute__(self, attr):
+        try:
+            ret = super().__getattribute__(attr)
+        except AttributeError:
+            if _is_dunder(attr):
+                raise
+            getter = getattr(self, '__getattr__', None)
+            if not getter:
+                raise
+            ret = getter(attr)
+        return ret.fget() if isinstance(ret, property) else ret
+
+
+def passthrough_module(parent, child, allowed_attributes=(..., ), *, callback=lambda _: None):
+    """Passthrough parent module into a child module, creating the parent if necessary"""
+    def __getattr__(attr):
+        if _is_package(parent):
+            with contextlib.suppress(ModuleNotFoundError):
+                return importlib.import_module(f'.{attr}', parent.__name__)
+
+        ret = from_child(attr)
+        if ret is _NO_ATTRIBUTE:
+            raise AttributeError(f'module {parent.__name__} has no attribute {attr}')
+        callback(attr)
+        return ret
+
+    @functools.lru_cache(maxsize=None)
+    def from_child(attr):
+        nonlocal child
+        if attr not in allowed_attributes:
+            if ... not in allowed_attributes or _is_dunder(attr):
+                return _NO_ATTRIBUTE
+
+        if isinstance(child, str):
+            child = importlib.import_module(child, parent.__name__)
+
+        if _is_package(child):
+            with contextlib.suppress(ImportError):
+                return passthrough_module(f'{parent.__name__}.{attr}',
+                                          importlib.import_module(f'.{attr}', child.__name__))
+
+        with contextlib.suppress(AttributeError):
+            return getattr(child, attr)
+
+        return _NO_ATTRIBUTE
+
+    parent = sys.modules.get(parent, types.ModuleType(parent))
+    parent.__class__ = EnhancedModule
+    parent.__getattr__ = __getattr__
+    return parent
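passthrough_module now returns the (possibly newly created) parent module, whose attribute misses fall through to the child and trigger the optional callback. A toy sketch, assuming yt-dlp is importable (the shim name is illustrative):

from yt_dlp.compat.compat_utils import passthrough_module

# Unknown attributes on the shim resolve from the stdlib json module
shim = passthrough_module('my_shim', 'json', callback=lambda attr: print('resolved', attr))
print(shim.dumps({'a': 1}))  # prints "resolved dumps", then '{"a": 1}'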
yt_dlp/compat/imghdr.py
@@ -2,13 +2,15 @@ tests = {
     'webp': lambda h: h[0:4] == b'RIFF' and h[8:] == b'WEBP',
     'png': lambda h: h[:8] == b'\211PNG\r\n\032\n',
     'jpeg': lambda h: h[6:10] in (b'JFIF', b'Exif'),
+    'gif': lambda h: h[:6] in (b'GIF87a', b'GIF89a'),
 }


-def what(path):
-    """Detect format of image (Currently supports jpeg, png, webp only)
+def what(file=None, h=None):
+    """Detect format of image (Currently supports jpeg, png, webp, gif only)
     Ref: https://github.com/python/cpython/blob/3.10/Lib/imghdr.py
     """
-    with open(path, 'rb') as f:
-        head = f.read(12)
-    return next((type_ for type_, test in tests.items() if test(head)), None)
+    if h is None:
+        with open(file, 'rb') as f:
+            h = f.read(12)
+    return next((type_ for type_, test in tests.items() if test(h)), None)
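The signature change mirrors the stdlib imghdr.what(file, h=None): callers can now pass the probe bytes directly instead of a path. For instance:

from yt_dlp.compat.imghdr import what

assert what(h=b'\211PNG\r\n\032\n' + b'\0' * 4) == 'png'  # 12 probe bytes
assert what(h=b'GIF89a' + b'\0' * 6) == 'gif'             # the new gif detection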
yt_dlp/compat/re.py (deleted)
@@ -1,18 +0,0 @@
-# flake8: noqa: F405
-from re import *  # F403
-
-from .compat_utils import passthrough_module
-
-passthrough_module(__name__, 're')
-del passthrough_module
-
-try:
-    Pattern  # >= 3.7
-except NameError:
-    Pattern = type(compile(''))
-
-
-try:
-    Match  # >= 3.7
-except NameError:
-    Match = type(compile('').match(''))
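As with compat/asyncio.py, the fallback is obsolete: the supported interpreters expose these types directly, which is what the _legacy.py hunk above now imports:

import re

assert isinstance(re.compile(''), re.Pattern)
assert isinstance(re.compile('').match(''), re.Match)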
yt_dlp/compat/shutil.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+# flake8: noqa: F405
+from shutil import *  # noqa: F403
+
+from .compat_utils import passthrough_module
+
+passthrough_module(__name__, 'shutil')
+del passthrough_module
+
+
+import sys
+
+if sys.platform.startswith('freebsd'):
+    import errno
+    import os
+    import shutil
+
+    # Workaround for PermissionError when using restricted ACL mode on FreeBSD
+    def copy2(src, dst, *args, **kwargs):
+        if os.path.isdir(dst):
+            dst = os.path.join(dst, os.path.basename(src))
+        shutil.copyfile(src, dst, *args, **kwargs)
+        try:
+            shutil.copystat(src, dst, *args, **kwargs)
+        except PermissionError as e:
+            if e.errno != getattr(errno, 'EPERM', None):
+                raise
+        return dst
+
+    def move(*args, copy_function=copy2, **kwargs):
+        return shutil.move(*args, copy_function=copy_function, **kwargs)
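The override works because shutil.move() accepts a copy_function hook for cross-filesystem moves; the FreeBSD wrapper slots in a copy2 replacement that tolerates EPERM from copystat. A standalone illustration of the hook (the function name is illustrative):

import shutil

def logging_copy2(src, dst, *args, **kwargs):
    print(f'copying {src} -> {dst}')  # observe cross-device copies
    return shutil.copy2(src, dst, *args, **kwargs)

# shutil.move('src.bin', '/other-volume/', copy_function=logging_copy2)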
yt_dlp/compat/types.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+# flake8: noqa: F405
+from types import *  # noqa: F403
+
+from .compat_utils import passthrough_module
+
+passthrough_module(__name__, 'types')
+del passthrough_module
+
+try:
+    # NB: pypy has builtin NoneType, so checking NameError won't work
+    from types import NoneType  # >= 3.10
+except ImportError:
+    NoneType = type(None)
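This backfills types.NoneType (in the stdlib only since Python 3.10), so isinstance checks against None work uniformly:

from yt_dlp.compat.types import NoneType

assert isinstance(None, NoneType)
assert not isinstance(0, NoneType)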
yt_dlp/compat/urllib/__init__.py (new file, 10 lines)
@@ -0,0 +1,10 @@
+# flake8: noqa: F405
+from urllib import *  # noqa: F403
+
+del request  # noqa: F821
+from . import request  # noqa: F401
+
+from ..compat_utils import passthrough_module
+
+passthrough_module(__name__, 'urllib')
+del passthrough_module
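With the passthrough in place, stdlib urllib submodules resolve through the compat package even without a local counterpart. A sketch, assuming yt-dlp is importable:

from yt_dlp.compat import urllib

# urllib.parse has no local override, so it falls through to the stdlib
print(urllib.parse.urlsplit('https://example.com/path').netloc)  # example.com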
Some files were not shown because too many files have changed in this diff.