mirror of
https://github.com/crunchy-labs/crunchy-cli.git
synced 2026-01-21 12:12:00 -06:00
Compare commits
816 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4332b1beef | ||
|
|
2cf9125de3 | ||
|
|
756022b955 | ||
|
|
509683d23a | ||
|
|
8047680799 | ||
|
|
287df84382 | ||
|
|
e7ac6d8874 | ||
|
|
fb8e535644 | ||
|
|
67c267be20 | ||
|
|
a1c7b2069d | ||
|
|
74e5e05b0f | ||
|
|
7d2ae719c8 | ||
|
|
5593046aae | ||
|
|
f8bd092987 | ||
|
|
cbe57e2b6e | ||
|
|
f7ce888329 | ||
|
|
301dac478f | ||
|
|
9819b62259 | ||
|
|
5279a9b759 | ||
|
|
a98e31f959 | ||
|
|
590242712b | ||
|
|
817963af4f | ||
|
|
48bb7a5ef6 | ||
|
|
53a710a373 | ||
|
|
ab63dcd2e0 | ||
|
|
4d1df83342 | ||
|
|
89b9c5db39 | ||
|
|
96d3de48cf | ||
|
|
dad91dba91 | ||
|
|
757d3094ea | ||
|
|
fca1b74cac | ||
|
|
0f7d7d928c | ||
|
|
f77804fcb5 | ||
|
|
4066b8511c | ||
|
|
dcbe433a9c | ||
|
|
55f1e1d32d | ||
|
|
442173c08c | ||
|
|
173292ff32 | ||
|
|
72c574c883 | ||
|
|
f237033aff | ||
|
|
bf28dbf1ce | ||
|
|
cf8bfb02ac | ||
|
|
74aaed4e7a | ||
|
|
177ceb1920 | ||
|
|
541f0e2747 | ||
|
|
777b39aba1 | ||
|
|
4f3475131c | ||
|
|
177aa37631 | ||
|
|
8fff807ae6 | ||
|
|
db6e45e7f4 | ||
|
|
8ada822396 | ||
|
|
9bdd3aa85b | ||
|
|
4fc20c7c1c | ||
|
|
6515d3025f | ||
|
|
fe17f3951e | ||
|
|
cdad7fc000 | ||
|
|
d7dac2acd4 | ||
|
|
dbbb445c55 | ||
|
|
733d9f9787 | ||
|
|
0257fdea0d | ||
|
|
9e5feef4d4 | ||
|
|
b9f5fadbb3 | ||
|
|
ea39dcbc71 | ||
|
|
a73773ce1d | ||
|
|
0115730d60 | ||
|
|
18534b259b | ||
|
|
77103ff1f1 | ||
|
|
771594a231 | ||
|
|
1a511e12f9 | ||
|
|
fe49161e93 | ||
|
|
25cde6163c | ||
|
|
4b74299733 | ||
|
|
c40ea8b132 | ||
|
|
6b6d24a575 | ||
|
|
8c1868f2fd | ||
|
|
af8ab24826 | ||
|
|
c0f3346846 | ||
|
|
111e461b30 | ||
|
|
f16cd25ea4 | ||
|
|
e694046b07 | ||
|
|
ba8028737d | ||
|
|
89be8ac429 | ||
|
|
26a858c1a1 | ||
|
|
d3696c783c | ||
|
|
88a28e843f | ||
|
|
a0fa2bfd8a | ||
|
|
013273b832 | ||
|
|
3bf2458774 | ||
|
|
e3a7fd9246 | ||
|
|
f1d266c940 | ||
|
|
3f33db6728 | ||
|
|
56f0ed1795 | ||
|
|
9c44fa7dae | ||
|
|
3099aac0e7 | ||
|
|
9a6959970a | ||
|
|
d2589a3a6f | ||
|
|
52da6eacc9 | ||
|
|
5634ce3277 | ||
|
|
6a7aa25e1a | ||
|
|
6a50567916 | ||
|
|
2084328069 | ||
|
|
d3ab2245a8 | ||
|
|
c31b1f4db9 | ||
|
|
8187269128 | ||
|
|
5d68f0334a | ||
|
|
a2464bad4e | ||
|
|
0f06c7ac71 | ||
|
|
f8309f2e80 | ||
|
|
982e521e0b | ||
|
|
a4abb14ae3 | ||
|
|
7cf7a8e71c | ||
|
|
3b9fc52890 | ||
|
|
444dc65a29 | ||
|
|
658bb86800 | ||
|
|
6e01e9e8a7 | ||
|
|
937e9a2fdc | ||
|
|
fbe182239a | ||
|
|
5490243df8 | ||
|
|
20f796f603 | ||
|
|
f3faa5bf94 | ||
|
|
3f401ccbd7 | ||
|
|
35447c5cb0 | ||
|
|
333d574e56 | ||
|
|
7c42f29596 | ||
|
|
ef2898f0e1 | ||
|
|
650338d3e6 | ||
|
|
c37d55aade | ||
|
|
d90f45fa31 | ||
|
|
99f96e3e35 | ||
|
|
d3837f2495 | ||
|
|
fc6da9a76d | ||
|
|
283a3802b2 | ||
|
|
172e3612d0 | ||
|
|
2e6246c439 | ||
|
|
d503d459cd | ||
|
|
19935df545 | ||
|
|
0da81a4814 | ||
|
|
0a26083232 | ||
|
|
8613ea80cc | ||
|
|
b97c2a922e | ||
|
|
be3248a4f9 | ||
|
|
f9e431e181 | ||
|
|
77609be598 | ||
|
|
b4057599a1 | ||
|
|
6c7ab04b99 | ||
|
|
9487dd3dbf | ||
|
|
9ca3b79291 | ||
|
|
8f77028fcb | ||
|
|
d5df3df95f | ||
|
|
440ccd99b5 | ||
|
|
2c37093959 | ||
|
|
14e71c05b8 | ||
|
|
d52fe7fb92 | ||
|
|
c08931b610 | ||
|
|
fc6511a361 | ||
|
|
56411c6547 | ||
|
|
4d01e2a4ec | ||
|
|
cd35dfe276 | ||
|
|
f31437fba2 | ||
|
|
e5d9c27af7 | ||
|
|
787d8ab02c | ||
|
|
7594412f58 | ||
|
|
d8b76f8cc7 | ||
|
|
f56d9ecabf | ||
|
|
5a3a304443 | ||
|
|
d0fe7f54f6 | ||
|
|
685c79d673 | ||
|
|
5d17bb1ac7 | ||
|
|
568bce0008 | ||
|
|
bbb5a78765 | ||
|
|
81385ef6ce | ||
|
|
13335c020b | ||
|
|
e5db8e9504 | ||
|
|
5bc68ad592 | ||
|
|
7095e2b8b6 | ||
|
|
610593a795 | ||
|
|
9596175f7f | ||
|
|
f48474ba77 | ||
|
|
d79197edc6 | ||
|
|
a93a1fa807 | ||
|
|
3e21ca4fe7 | ||
|
|
01913e0db3 | ||
|
|
64428ea7d1 | ||
|
|
8eda8df3f7 | ||
|
|
185b65fc9b | ||
|
|
7485bd8e76 | ||
|
|
0f7e6c9120 | ||
|
|
b477ca982c | ||
|
|
18f891efd2 | ||
|
|
3ae6fe4a1a | ||
|
|
e06e6b2b01 | ||
|
|
70b3a7a3e1 | ||
|
|
a80f6e5df4 | ||
|
|
2f57b07559 | ||
|
|
596fcc2342 | ||
|
|
70b41b4dd5 | ||
|
|
6a6b981979 | ||
|
|
92ed4bd87d | ||
|
|
d295a57f84 | ||
|
|
31fe1460f1 | ||
|
|
f45bb19cd7 | ||
|
|
6da292f013 | ||
|
|
a45833f5a2 | ||
|
|
448b633be8 | ||
|
|
800df5ca6c | ||
|
|
9f9aec1f8a | ||
|
|
b98332eae4 | ||
|
|
40397e96a3 | ||
|
|
aef2fddff7 | ||
|
|
a12a8bc366 | ||
|
|
6f40ffacec | ||
|
|
2bcaa6e4d5 | ||
|
|
0586f38cdc | ||
|
|
435b75bbf9 | ||
|
|
b1342d54f3 | ||
|
|
700b041f9a | ||
|
|
84c70f2bee | ||
|
|
4c396a9e4a | ||
|
|
9ced3483d8 | ||
|
|
db156d361f | ||
|
|
5afda0b5f1 | ||
|
|
068c0fcac1 | ||
|
|
00e8082e66 | ||
|
|
dc6bc0d951 | ||
|
|
4ec9a0d309 | ||
|
|
566422cb06 | ||
|
|
dd2033d323 | ||
|
|
8490263e84 | ||
|
|
6b76887978 | ||
|
|
850aa7a969 | ||
|
|
9ad27102fc | ||
|
|
513353890d | ||
|
|
49de7bbba9 | ||
|
|
751735477c | ||
|
|
1fe8746dda | ||
|
|
af8a88a792 | ||
|
|
f40dc0dd1c | ||
|
|
0234d46bf9 | ||
|
|
618d2206a2 | ||
|
|
75b6e7b452 | ||
|
|
fc44b8af8a | ||
|
|
d75c04fbb6 | ||
|
|
f4682e0f29 | ||
|
|
5b8a4b9969 | ||
|
|
0ef4980ab3 | ||
|
|
2ebc76a0df | ||
|
|
f7af983526 | ||
|
|
0cd647fb14 | ||
|
|
4e4a4355f5 | ||
|
|
0b044ba27e | ||
|
|
26ca3ca65c | ||
|
|
e9b4837f44 | ||
|
|
0aa648b1a5 | ||
|
|
0beaa99bfd | ||
|
|
7ed1158339 | ||
|
|
b55ac9a51a | ||
|
|
32aab193d0 | ||
|
|
49d64805ca | ||
|
|
19f79a4349 | ||
|
|
f3f900064a | ||
|
|
a2b7c78752 | ||
|
|
4bd172df06 | ||
|
|
b24827dc6b | ||
|
|
61766c74fa | ||
|
|
c2e953043e | ||
|
|
dc431a9637 | ||
|
|
0f73d8dbec | ||
|
|
94fcf1590a | ||
|
|
7b1ed30b20 | ||
|
|
ce358041be | ||
|
|
ff258c0722 | ||
|
|
e277b4200f | ||
|
|
847c6a1abc | ||
|
|
c0e2df4804 | ||
|
|
c4a4651164 | ||
|
|
13d8cc26c9 | ||
|
|
d754f9339b | ||
|
|
95f8cc542c | ||
|
|
3c648f4192 | ||
|
|
d8d1f8a443 | ||
|
|
f584c8028f | ||
|
|
273db9fe6a | ||
|
|
bfc50653b1 | ||
|
|
c4c15f9b11 | ||
|
|
d33e2fa36b | ||
|
|
5f98cfb186 | ||
|
|
ee1344fc6b | ||
|
|
baa6ca5018 | ||
|
|
fcbcd175e1 | ||
|
|
481f35d232 | ||
|
|
cb7612e86b | ||
|
|
d79f00871e | ||
|
|
7e34076a7b | ||
|
|
8d1be6b573 | ||
|
|
bd61c18859 | ||
|
|
8e972ab578 | ||
|
|
1213880df7 | ||
|
|
8a52307845 | ||
|
|
e819e44671 | ||
|
|
c97adb3ce7 | ||
|
|
bd20c5a7b6 | ||
|
|
55483878b3 | ||
|
|
b1d23c2f25 | ||
|
|
ba1c0aaaa4 | ||
|
|
a7adb7191e | ||
|
|
57236f2b31 | ||
|
|
0a40f3c40f | ||
|
|
a6bfe0be2e | ||
|
|
d6f1262c1c | ||
|
|
19f9d26af9 | ||
|
|
90212c4ec0 | ||
|
|
c315f87f33 | ||
|
|
656ce0b523 | ||
|
|
758db86f2f | ||
|
|
e4919e80ba | ||
|
|
fbc98b2308 | ||
|
|
03dd1c5264 | ||
|
|
cba921f1a8 | ||
|
|
264d943a2c | ||
|
|
ba57d3c25d | ||
|
|
96b259ce9a | ||
|
|
1a08e76162 | ||
|
|
e83de60efa | ||
|
|
43e32e1453 | ||
|
|
e115dcd87f | ||
|
|
32691e6fa5 | ||
|
|
21a5782825 | ||
|
|
b3226cdde5 | ||
|
|
cdf054ff58 | ||
|
|
1844a563d2 | ||
|
|
901fdc0dbc | ||
|
|
6bd75c93cb | ||
|
|
3dd8385aac | ||
|
|
577c0679ad | ||
|
|
685ac85857 | ||
|
|
b5bc36c4a2 | ||
|
|
497f22ee49 | ||
|
|
3d145b021b | ||
|
|
08c4e30a06 | ||
|
|
6d1f8d49f6 | ||
|
|
17233f2fd2 | ||
|
|
4482d5482f | ||
|
|
5ce5b249c9 | ||
|
|
3029325776 | ||
|
|
a0aab3bfb9 | ||
|
|
2ea036d4c6 | ||
|
|
7d3a90e811 | ||
|
|
29845ba6e5 | ||
|
|
12be16417f | ||
|
|
4b33ef02c6 | ||
|
|
13f54c0da6 | ||
|
|
b65c0e9dfd | ||
|
|
537158cd7b | ||
|
|
b991614dc3 | ||
|
|
7588621f34 | ||
|
|
54dfe8002e | ||
|
|
06fd9a7a98 | ||
|
|
892407d1f0 | ||
|
|
404aa496e1 | ||
|
|
7726287859 | ||
|
|
d0a8103e3d | ||
|
|
29c6129e6e | ||
|
|
c11851adc9 | ||
|
|
83bd71916c | ||
|
|
b365bda5dc | ||
|
|
3c3b7b6566 | ||
|
|
fae5d69933 | ||
|
|
0c13942016 | ||
|
|
03db38b31c | ||
|
|
6267fd3ba7 | ||
|
|
b8e46099f9 | ||
|
|
7115c5546d | ||
|
|
d0681c7f6c | ||
|
|
c2ae622d01 | ||
|
|
8a3c0132e7 | ||
|
|
240e5563a3 | ||
|
|
c5940a240c | ||
|
|
9e0edda7c2 | ||
|
|
14f42833cb | ||
|
|
c37e2495e1 | ||
|
|
2c3bd78fc1 | ||
|
|
022f23e3ab | ||
|
|
17fa045c32 | ||
|
|
86759557fe | ||
|
|
af9aca4d0c | ||
|
|
8bb2c9c750 | ||
|
|
67bbc00d87 | ||
|
|
4bfc6f22e1 | ||
|
|
4f107d8cf2 | ||
|
|
5de4a83e5d | ||
|
|
306019d8b8 | ||
|
|
2451e33639 | ||
|
|
03fe0c6f01 | ||
|
|
50c520d660 | ||
|
|
d49f2d8eaa | ||
|
|
cc9342cd0a | ||
|
|
f254df3bb3 | ||
|
|
52ee0c48e1 | ||
|
|
db3697c372 | ||
|
|
b814529aa2 | ||
|
|
578e5ea5b7 | ||
|
|
a5e60ea6b7 | ||
|
|
5c3f49e9f4 | ||
|
|
01e2603e84 | ||
|
|
f0de4509c5 | ||
|
|
2f7e992a6e | ||
|
|
985ec2ade9 | ||
|
|
c4540ada50 | ||
|
|
54018f9773 | ||
|
|
a32d3aef87 | ||
|
|
2e4e897dc1 | ||
|
|
ce5672588e | ||
|
|
933d217b63 | ||
|
|
1b1756a0ae | ||
|
|
6832c69eaa | ||
|
|
91f8a82ca4 | ||
|
|
4cd46f19ac | ||
|
|
c383b4d307 | ||
|
|
faadd89fff | ||
|
|
342cf23ae0 | ||
|
|
285d27772c | ||
|
|
7c3bbfc173 | ||
|
|
64717fd405 | ||
|
|
cd1308426e | ||
|
|
5826d95e6a | ||
|
|
135d59ce8b | ||
|
|
9d45995e86 | ||
|
|
afab3826c9 | ||
|
|
e9b3088cde | ||
|
|
cd9c69baf1 | ||
|
|
33e27504f2 | ||
|
|
3fcb512c18 | ||
|
|
6aa4078be3 | ||
|
|
474e9f5e31 | ||
|
|
f6d6c9435c | ||
|
|
f687969f04 | ||
|
|
64bb39362e | ||
|
|
0bb20d24a2 | ||
|
|
a3c717dc1a | ||
|
|
12d49a27e4 | ||
|
|
6ecd23bcd0 | ||
|
|
e200aab8ab | ||
|
|
4095b80477 | ||
|
|
99002e606f | ||
|
|
2d89a71203 | ||
|
|
b1182d4f7b | ||
|
|
59b5e3d239 | ||
|
|
24fbedc7d7 | ||
|
|
c99eedd7a7 | ||
|
|
81931829b0 | ||
|
|
1487ba222e | ||
|
|
7fe587a891 | ||
|
|
9bfe6b0e54 | ||
|
|
f3f41aa0a2 | ||
|
|
b118b74b99 | ||
|
|
45c315e9bb | ||
|
|
8d8333e414 | ||
|
|
a760588441 | ||
|
|
3f4ce3a0a9 | ||
|
|
3e7d2583b7 | ||
|
|
a7c2bbe807 | ||
|
|
4fd98723ea | ||
|
|
502cb39923 | ||
|
|
039d7cfb81 | ||
|
|
59e8793a2f | ||
|
|
10617df834 | ||
|
|
95b66c3ff5 | ||
|
|
0572af4e07 | ||
|
|
f3e93ba2b8 | ||
|
|
dc7e5d564e | ||
|
|
e0d100b627 | ||
|
|
cd1587c613 | ||
|
|
eba2417f4e | ||
|
|
36bdc76a48 | ||
|
|
1e17c0b117 | ||
|
|
b4bc047b30 | ||
|
|
d4bef511cb | ||
|
|
2f08aeac1a | ||
|
|
3f12cbae95 | ||
|
|
b99c3395f1 | ||
|
|
b42c87c9f8 | ||
|
|
136d970fec | ||
|
|
8111f14da3 | ||
|
|
97dd801137 | ||
|
|
13798b608e | ||
|
|
62938a500f | ||
|
|
36c1423ff6 | ||
|
|
027047fc7e | ||
|
|
689bbcd9a4 | ||
|
|
610c4e2993 | ||
|
|
d53b20717a | ||
|
|
dc2309ab10 | ||
|
|
c02306ff9f | ||
|
|
3f78101eb8 | ||
|
|
afc85350ab | ||
|
|
0371b31dcc | ||
|
|
f974d5296b | ||
|
|
e5636df969 | ||
|
|
d13e5714f8 | ||
|
|
a907958a71 | ||
|
|
416507c8a6 | ||
|
|
4ae4345c40 | ||
|
|
ac876f674a | ||
|
|
441ec084af | ||
|
|
f7a21fbfb2 | ||
|
|
6239d10d22 | ||
|
|
a64981930b | ||
|
|
caeb734b2c | ||
|
|
b5f4882601 | ||
|
|
81946c5092 | ||
|
|
fbb90f9079 | ||
|
|
b62769ccfd | ||
|
|
8942ea574b | ||
|
|
2773445050 | ||
|
|
5b4c228b60 | ||
|
|
fd502446c6 | ||
|
|
5b59662e29 | ||
|
|
1365910610 | ||
|
|
680db83c59 | ||
|
|
f1a41d6d3b | ||
|
|
303689ecbb | ||
|
|
781e520591 | ||
|
|
d65226252d | ||
|
|
8a3e42e4d1 | ||
|
|
0fed0f8d3b | ||
|
|
7b16938b52 | ||
|
|
32885fd36c | ||
|
|
cba8968f17 | ||
|
|
79c3ba2636 | ||
|
|
3dcfbc0fbb | ||
|
|
2569ddd1c7 | ||
|
|
fa2321e9e8 | ||
|
|
1a4abdc4d8 | ||
|
|
e6172cdf90 | ||
|
|
14491ce6c9 | ||
|
|
4cfcc11e20 | ||
|
|
0521895f11 | ||
|
|
f03287856b | ||
|
|
9919a48e9a | ||
|
|
ead1db2be8 | ||
|
|
28070bd32d | ||
|
|
a283ba7247 | ||
|
|
bfad0caa9a | ||
|
|
2a05a74cc9 | ||
|
|
810f3ae12e | ||
|
|
2067c50937 | ||
|
|
f71846628d | ||
|
|
ec872d8c86 | ||
|
|
d1859b4c25 | ||
|
|
256c97c2b7 | ||
|
|
715ade831c | ||
|
|
cee3410532 | ||
|
|
0c93893627 | ||
|
|
475dc34f7a | ||
|
|
c86595d2c6 | ||
|
|
9f6a225caf | ||
|
|
72484c78af | ||
|
|
aa088cb318 | ||
|
|
8ddb436fac | ||
|
|
f9792aa847 | ||
|
|
5709012dfe | ||
|
|
c5f2b55f34 | ||
|
|
8d69b7775b | ||
|
|
170fb2efb8 | ||
|
|
137f3779ea | ||
|
|
ae075ed4c9 | ||
|
|
31d3065e7b | ||
|
|
dcdde6749e | ||
|
|
69d2e10362 | ||
|
|
223459526e | ||
|
|
735136077e | ||
|
|
c963af1f11 | ||
|
|
a20c70cd2b | ||
|
|
51b5e7b2ff | ||
|
|
ce29e31164 | ||
|
|
b9ff56c111 | ||
|
|
141173d3c8 | ||
|
|
38fe521d55 | ||
|
|
35b1cbbdb4 | ||
|
|
3a7ec02598 | ||
|
|
0092867b97 | ||
|
|
1c37c3e699 | ||
|
|
e31b8730da | ||
|
|
2471042d02 | ||
|
|
6581a5bd0f | ||
|
|
048d1ba782 | ||
|
|
29343d1c6f | ||
|
|
b53256ca3f | ||
|
|
acc6c63ebd | ||
|
|
7897da3baf | ||
|
|
cf35596985 | ||
|
|
08c46e50bb | ||
|
|
15373ed7d6 | ||
|
|
b4ba8c4599 | ||
|
|
0780a2a2bc | ||
|
|
c94ce0fb59 | ||
|
|
638689ee32 | ||
|
|
382d19ee94 | ||
|
|
2d28991a70 | ||
|
|
eb2414d012 | ||
|
|
b78d6a7871 | ||
|
|
4d65d2f2df | ||
|
|
a4ec163275 | ||
|
|
b491ba0f58 | ||
|
|
608e03bc11 | ||
|
|
d4e095a576 | ||
|
|
7be803d485 | ||
|
|
f635bf1a2e | ||
|
|
43be2eee14 | ||
|
|
0ffae4ddda | ||
|
|
5d732123d9 | ||
|
|
00ea7635eb | ||
|
|
b92eddc5d2 | ||
|
|
ea3506b7f6 | ||
|
|
b70bf9902b | ||
|
|
0fa829828f | ||
|
|
6c476df24e | ||
|
|
5b3466d06d | ||
|
|
afa975c459 | ||
|
|
f51bdeaec7 | ||
|
|
62735cf07c | ||
|
|
192a85afb8 | ||
|
|
f046b68371 | ||
|
|
901bbf0706 | ||
|
|
3ee53c0cab | ||
|
|
e7e106f74d | ||
|
|
11b0f2b48b | ||
|
|
1c1dd5ec4b | ||
|
|
ed8e63c268 | ||
|
|
5e3636015b | ||
|
|
7db4ca6b93 | ||
|
|
df44104900 | ||
|
|
ad703fb985 | ||
|
|
a590da8231 | ||
|
|
ddfd2b44a1 | ||
|
|
80b0784f50 | ||
|
|
a49e65e151 | ||
|
|
e65535bf00 | ||
|
|
1f1f6849dc | ||
|
|
413949797c | ||
|
|
362708cf35 | ||
|
|
187a0c8817 | ||
|
|
aa3f8e1b34 | ||
|
|
353f425bbf | ||
|
|
db47eeb11c | ||
|
|
037df1d16f | ||
|
|
48595f25fa | ||
|
|
0c92fc0989 | ||
|
|
1e865adaa5 | ||
|
|
c9ad097d85 | ||
|
|
980c28a754 | ||
|
|
06a5414210 | ||
|
|
68aa7e903f | ||
|
|
58bf549964 | ||
|
|
aeb159960c | ||
|
|
be3d33744f | ||
|
|
fd3d945c3a | ||
|
|
580ea74902 | ||
|
|
2d08e98538 | ||
|
|
6385457c10 | ||
|
|
46331f4c7e | ||
|
|
4b5b187730 | ||
|
|
543b9c3668 | ||
|
|
a98eb56fba | ||
|
|
9ccd1ed93c | ||
|
|
b524a1a7dd | ||
|
|
598e460e6c | ||
|
|
3617955bc5 | ||
|
|
2e9ce3cf52 | ||
|
|
e2f42493ac | ||
|
|
253d8712c8 | ||
|
|
db30b9eadc | ||
|
|
02bd33ef59 | ||
|
|
cda7bc9d35 | ||
|
|
8dcbced9c7 | ||
|
|
d34fd10516 | ||
|
|
d27fc67288 | ||
|
|
e0069a10e0 | ||
|
|
e4d075c855 | ||
|
|
051cad4537 | ||
|
|
6b3635aef3 | ||
|
|
def5dc417c | ||
|
|
36ecbb0cd7 | ||
|
|
8e791f7e8b | ||
|
|
851dab7aac | ||
|
|
bb0e51deee | ||
|
|
23bb9fdbd6 | ||
|
|
449cc37896 | ||
|
|
469880fc75 | ||
|
|
c367636d96 | ||
|
|
b239353039 | ||
|
|
f65cef84ca | ||
|
|
893be7dd53 | ||
|
|
c1b3cd7b0a | ||
|
|
83c9ae4927 | ||
|
|
ba8054b611 | ||
|
|
d090a5a1da | ||
|
|
f6f51799d7 | ||
|
|
ac7904668f | ||
|
|
a6c14cb6c9 | ||
|
|
588d402c63 | ||
|
|
98f5da3bf3 | ||
|
|
31e03c1e08 | ||
|
|
e9fdd8fd43 | ||
|
|
dafa2f4d70 | ||
|
|
668c673750 | ||
|
|
8525b913b8 | ||
|
|
cddb6ce033 | ||
|
|
c822beca53 | ||
|
|
73591cd75c | ||
|
|
16fcf08f34 | ||
|
|
52a8f81356 | ||
|
|
1d6b930d9a | ||
|
|
3ee5f7a538 | ||
|
|
aa1d72868b | ||
|
|
4d538dfc0c | ||
|
|
b1945d672d | ||
|
|
60fed00b0e | ||
|
|
600eaeeeca | ||
|
|
a378c93d68 | ||
|
|
b1e5718c8d | ||
|
|
d42206b8f9 | ||
|
|
b9b9654d2c | ||
|
|
e855c16a2f | ||
|
|
5f5ec38585 | ||
|
|
6fb6b3c03c | ||
|
|
d6347a6b6b | ||
|
|
5219f18417 | ||
|
|
647eb075e5 | ||
|
|
733f4a97ea | ||
|
|
252762f410 | ||
|
|
e785cb580b | ||
|
|
ef4b24f068 | ||
|
|
9fbb3266aa | ||
|
|
79d55a5d3b | ||
|
|
ceec69ef65 | ||
|
|
b4ba50d5a4 | ||
|
|
777738a1a1 | ||
|
|
bd19b34088 | ||
|
|
3285d458be | ||
|
|
86368bf985 | ||
|
|
758b9b59c8 | ||
|
|
685dbc622e | ||
|
|
3dfc69e2d9 | ||
|
|
18ce16b3bd | ||
|
|
e7b5a7bf30 | ||
|
|
22525973c7 | ||
|
|
77c0ae64b4 | ||
|
|
3382eca3bd | ||
|
|
a6858556b5 | ||
|
|
1b0124e385 | ||
|
|
8544a49cab | ||
|
|
d5fc8824cf | ||
|
|
0a4b9ec96e | ||
|
|
c557486089 | ||
|
|
1c3de9ea93 | ||
|
|
92fe5782d1 | ||
|
|
0cbcad2c40 | ||
|
|
af2824f216 | ||
|
|
9c9a6f497f | ||
|
|
7dd74e793a | ||
|
|
a5d9696c9c | ||
|
|
0e8738a304 | ||
|
|
21619fe333 | ||
|
|
674b0e0de3 | ||
|
|
effd160dc3 | ||
|
|
30e8663186 | ||
|
|
d1235fea1a | ||
|
|
08dd79ff47 | ||
|
|
6b2d7c6e27 | ||
|
|
87f57bacfc | ||
|
|
1c42e526bd | ||
|
|
be281d64a7 | ||
|
|
055ebe1910 | ||
|
|
65b2d4cc47 | ||
|
|
deafe49247 | ||
|
|
8cfc279fb8 | ||
|
|
b5ade6066e | ||
|
|
ccc598634c | ||
|
|
149f497d1c | ||
|
|
0721ecee63 | ||
|
|
3aea319b7c | ||
|
|
8587cd44ac | ||
|
|
58ce651134 | ||
|
|
4296bd191d | ||
|
|
56015d3174 | ||
|
|
20cdb64f54 | ||
|
|
ab28e5beaa | ||
|
|
19ff49b123 | ||
|
|
08ff30bd91 | ||
|
|
3937fd9763 | ||
|
|
49e387e72b | ||
|
|
e581778d48 | ||
|
|
54d424e787 | ||
|
|
458c0595e5 | ||
|
|
18eac04320 | ||
|
|
74e8810123 | ||
|
|
65639378da | ||
|
|
4242a2f4cf | ||
|
|
10a58ae932 | ||
|
|
bc3bfb1c5f | ||
|
|
1f1f6feeca | ||
|
|
4bea3dd056 | ||
|
|
bc410a862e | ||
|
|
3af31caae1 | ||
|
|
606708b1ec | ||
|
|
2187b72b8e | ||
|
|
4c9631774a | ||
|
|
5c9a3d0715 | ||
|
|
d41bcd864e | ||
|
|
889a05a5e2 | ||
|
|
274e90575a | ||
|
|
464636daa4 | ||
|
|
5921c132e3 | ||
|
|
a28002445a | ||
|
|
2f5d3ea07b |
68 changed files with 11038 additions and 4138 deletions
1
.envrc
Normal file
1
.envrc
Normal file
|
|
@ -0,0 +1 @@
|
|||
use flake
|
||||
30
.github/ISSUE_TEMPLATE/bug-report.md
vendored
Normal file
30
.github/ISSUE_TEMPLATE/bug-report.md
vendored
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps / command to reproduce the behavior:
|
||||
```
|
||||
$ crunchy ...
|
||||
```
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Client (please complete the following information):**
|
||||
- OS: [e.g. Windows]
|
||||
- Version [e.g. 3.0.0-dev.8 (17233f2 2023-01-10)] <!-- Version 1 or 2 aren't actively supported anymore. Make sure that the bug occurs on the master branch or a version 3 pre-release -->
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
17
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
17
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
48
.github/workflow-resources/PKGBUILD.binary
vendored
Normal file
48
.github/workflow-resources/PKGBUILD.binary
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
# Maintainer: ByteDream
|
||||
pkgname=crunchy-cli-bin
|
||||
pkgdesc="Command-line downloader for Crunchyroll"
|
||||
arch=('x86_64' 'aarch64')
|
||||
url="https://github.com/crunchy-labs/crunchy-cli"
|
||||
license=('MIT')
|
||||
|
||||
pkgver=$CI_PKG_VERSION
|
||||
pkgrel=1
|
||||
|
||||
depends=('ffmpeg')
|
||||
provides=('crunchy-cli')
|
||||
conflicts=('crunchy-cli')
|
||||
source_x86_64=(
|
||||
"crunchy-cli::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-linux-x86_64"
|
||||
"manpages.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-manpages.zip"
|
||||
"completions.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-completions.zip"
|
||||
"LICENSE::https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/v${pkgver}/LICENSE"
|
||||
)
|
||||
source_aarch64=(
|
||||
"crunchy-cli::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-linux-aarch64"
|
||||
"manpages.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-manpages.zip"
|
||||
"completions.zip::https://github.com/crunchy-labs/crunchy-cli/releases/download/v${pkgver}/crunchy-cli-v${pkgver}-completions.zip"
|
||||
"LICENSE::https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/v${pkgver}/LICENSE"
|
||||
)
|
||||
noextract=("manpages.zip" "completions.zip")
|
||||
sha256sums_x86_64=('$CI_AMD_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
|
||||
sha256sums_aarch64=('$CI_ARM_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
|
||||
|
||||
package() {
|
||||
cd "$srcdir"
|
||||
|
||||
# all files in manpages.zip and completions.zip are stored in root of the archive, makepkg extracts them all to $srcdir
|
||||
# which makes it pretty messy. so the extraction is done manually to keep the content of $srcdir structured
|
||||
mkdir manpages completions
|
||||
cd manpages
|
||||
bsdtar -xf ../manpages.zip
|
||||
cd ../completions
|
||||
bsdtar -xf ../completions.zip
|
||||
cd ..
|
||||
|
||||
install -Dm755 crunchy-cli $pkgdir/usr/bin/crunchy-cli
|
||||
install -Dm644 manpages/* -t $pkgdir/usr/share/man/man1
|
||||
install -Dm644 completions/crunchy-cli.bash $pkgdir/usr/share/bash-completion/completions/crunchy-cli
|
||||
install -Dm644 completions/_crunchy-cli $pkgdir/usr/share/zsh/site-functions/_crunchy-cli
|
||||
install -Dm644 completions/crunchy-cli.fish $pkgdir/usr/share/fish/vendor_completions.d/crunchy-cli.fish
|
||||
install -Dm644 LICENSE $pkgdir/usr/share/licenses/crunchy-cli/LICENSE
|
||||
}
|
||||
46
.github/workflow-resources/PKGBUILD.source
vendored
Normal file
46
.github/workflow-resources/PKGBUILD.source
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
# Maintainer: ByteDream
|
||||
pkgname=crunchy-cli
|
||||
pkgdesc="Command-line downloader for Crunchyroll"
|
||||
arch=('x86_64' 'i686' 'arm' 'armv6h' 'armv7h' 'aarch64')
|
||||
url="https://github.com/crunchy-labs/crunchy-cli"
|
||||
license=('MIT')
|
||||
|
||||
pkgver=$CI_PKG_VERSION
|
||||
pkgrel=1
|
||||
|
||||
depends=('ffmpeg' 'openssl')
|
||||
makedepends=('cargo')
|
||||
source=("${pkgname}-${pkgver}.tar.gz::https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/v${pkgver}.tar.gz")
|
||||
sha256sums=('$CI_SHA_SUM')
|
||||
# lto causes linking errors when executed by this buildscript. besides, lto is already done by cargo itself (which doesn't cause linking errors)
|
||||
options=(!lto)
|
||||
|
||||
prepare() {
|
||||
cd "$srcdir/${pkgname}-$pkgver"
|
||||
|
||||
export RUSTUP_TOOLCHAIN=stable
|
||||
export CARGO_HOME="$srcdir/cargo-home"
|
||||
|
||||
cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')"
|
||||
}
|
||||
|
||||
build() {
|
||||
cd "$srcdir/${pkgname}-$pkgver"
|
||||
|
||||
export RUSTUP_TOOLCHAIN=stable
|
||||
export CARGO_HOME="$srcdir/cargo-home"
|
||||
|
||||
export CRUNCHY_CLI_GIT_HASH=$CI_GIT_HASH
|
||||
cargo build --frozen --release
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "$srcdir/${pkgname}-$pkgver"
|
||||
|
||||
install -Dm755 target/release/crunchy-cli $pkgdir/usr/bin/crunchy-cli
|
||||
install -Dm644 target/release/manpages/* -t $pkgdir/usr/share/man/man1
|
||||
install -Dm644 target/release/completions/crunchy-cli.bash $pkgdir/usr/share/bash-completion/completions/crunchy-cli
|
||||
install -Dm644 target/release/completions/_crunchy-cli $pkgdir/usr/share/zsh/site-functions/_crunchy-cli
|
||||
install -Dm644 target/release/completions/crunchy-cli.fish $pkgdir/usr/share/fish/vendor_completions.d/crunchy-cli.fish
|
||||
install -Dm644 LICENSE $pkgdir/usr/share/licenses/crunchy-cli/LICENSE
|
||||
}
|
||||
145
.github/workflows/build.yml
vendored
Normal file
145
.github/workflows/build.yml
vendored
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
name: build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- '*'
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build-linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- arch: x86_64
|
||||
toolchain: x86_64-unknown-linux-musl
|
||||
- arch: aarch64
|
||||
toolchain: aarch64-unknown-linux-musl
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cargo cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: ${{ matrix.toolchain }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: stable
|
||||
|
||||
- name: Install cross
|
||||
run: cargo install --force cross
|
||||
|
||||
- name: Build
|
||||
run: cross build --locked --release --no-default-features --features openssl-tls-static --target ${{ matrix.toolchain }}
|
||||
|
||||
- name: Upload binary artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: crunchy-cli-linux-${{ matrix.arch }}
|
||||
path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Upload manpages artifact
|
||||
if: ${{ matrix.arch == 'x86_64' }} # only upload the manpages once
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: manpages
|
||||
path: ./target/${{ matrix.toolchain }}/release/manpages
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Upload completions artifact
|
||||
if: ${{ matrix.arch == 'x86_64' }} # only upload the completions once
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: completions
|
||||
path: ./target/${{ matrix.toolchain }}/release/completions
|
||||
if-no-files-found: error
|
||||
|
||||
build-mac:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
# macos-13 uses x86_64, macos-14 aarch64
|
||||
# see https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources
|
||||
include:
|
||||
- os: macos-13
|
||||
arch: x86_64
|
||||
toolchain: x86_64-apple-darwin
|
||||
- os: macos-14
|
||||
arch: aarch64
|
||||
toolchain: aarch64-apple-darwin
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cargo cache
|
||||
if: ${{ matrix.os != 'macos-13' }} # when using cache, the 'Setup Rust' step fails for macos 13
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: x86_64-apple-darwin-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: stable
|
||||
|
||||
- name: Build
|
||||
run: cargo build --locked --release --target ${{ matrix.toolchain }}
|
||||
|
||||
- name: Upload binary artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: crunchy-cli-darwin-${{ matrix.arch }}
|
||||
path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
|
||||
if-no-files-found: error
|
||||
|
||||
build-windows:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cargo cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: x86_64-pc-windows-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Install system dependencies
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
update: true
|
||||
install: mingw-w64-x86_64-rust base-devel
|
||||
|
||||
- name: Build
|
||||
shell: msys2 {0}
|
||||
run: cargo build --locked --release --target x86_64-pc-windows-gnu
|
||||
|
||||
- name: Upload binary artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: crunchy-cli-windows-x86_64
|
||||
path: ./target/x86_64-pc-windows-gnu/release/crunchy-cli.exe
|
||||
if-no-files-found: error
|
||||
58
.github/workflows/lint.yml
vendored
Normal file
58
.github/workflows/lint.yml
vendored
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
name: lint
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- '*'
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
fmt:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cargo cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: stable
|
||||
|
||||
- name: Check fmt
|
||||
run: cargo fmt --check
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cargo cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
target/
|
||||
key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
toolchain: stable
|
||||
|
||||
- name: Lint
|
||||
run: cargo clippy -- -D warnings
|
||||
74
.github/workflows/publish.yml
vendored
Normal file
74
.github/workflows/publish.yml
vendored
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
name: publish
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
|
||||
jobs:
|
||||
publish-aur:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Get version
|
||||
run: echo "RELEASE_VERSION=$(echo ${{ github.ref_name }} | cut -c 2-)" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate crunchy-cli sha sum
|
||||
run: |
|
||||
curl -LO https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/${{ github.ref_name }}.tar.gz
|
||||
echo "CRUNCHY_CLI_SHA256=$(sha256sum ${{ github.ref_name }}.tar.gz | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||
|
||||
- name: Get release commit hash
|
||||
run: echo "CRUNCHY_CLI_GIT_HASH=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate crunchy-cli PKGBUILD
|
||||
env:
|
||||
CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
|
||||
CI_SHA_SUM: ${{ env.CRUNCHY_CLI_SHA256 }}
|
||||
CI_GIT_HASH: ${{ env.CRUNCHY_CLI_GIT_HASH }}
|
||||
run: envsubst '$CI_PKG_VERSION,$CI_SHA_SUM,$CI_GIT_HASH' < .github/workflow-resources/PKGBUILD.source > PKGBUILD
|
||||
|
||||
- name: Publish crunchy-cli to AUR
|
||||
uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
|
||||
with:
|
||||
pkgname: crunchy-cli
|
||||
pkgbuild: ./PKGBUILD
|
||||
commit_username: release-action
|
||||
commit_email: ${{ secrets.AUR_EMAIL }}
|
||||
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
|
||||
commit_message: Update to version ${{ env.RELEASE_VERSION }}
|
||||
|
||||
- name: Generate crunchy-cli-bin sha sums
|
||||
run: |
|
||||
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-linux-x86_64
|
||||
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-linux-aarch64
|
||||
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-completions.zip
|
||||
curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-manpages.zip
|
||||
curl -LO https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/${{ github.ref_name }}/LICENSE
|
||||
echo "CRUNCHY_CLI_BIN_x86_64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-x86_64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||
echo "CRUNCHY_CLI_BIN_aarch64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-aarch64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||
echo "CRUNCHY_CLI_BIN_COMPLETIONS_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-completions.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||
echo "CRUNCHY_CLI_BIN_MANPAGES_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-manpages.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||
echo "CRUNCHY_CLI_BIN_LICENSE_SHA256=$(sha256sum LICENSE | cut -f 1 -d ' ')" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate crunchy-cli-bin PKGBUILD
|
||||
env:
|
||||
CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
|
||||
CI_AMD_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_x86_64_SHA256 }}
|
||||
CI_ARM_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_aarch64_SHA256 }}
|
||||
CI_MANPAGES_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_MANPAGES_SHA256 }}
|
||||
CI_COMPLETIONS_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_COMPLETIONS_SHA256 }}
|
||||
CI_LICENSE_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_LICENSE_SHA256 }}
|
||||
run: envsubst '$CI_PKG_VERSION,$CI_AMD_BINARY_SHA_SUM,$CI_ARM_BINARY_SHA_SUM,$CI_COMPLETIONS_SHA_SUM,$CI_MANPAGES_SHA_SUM,$CI_LICENSE_SHA_SUM' < .github/workflow-resources/PKGBUILD.binary > PKGBUILD
|
||||
|
||||
- name: Publish crunchy-cli-bin to AUR
|
||||
uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
|
||||
with:
|
||||
pkgname: crunchy-cli-bin
|
||||
pkgbuild: ./PKGBUILD
|
||||
commit_username: release-action
|
||||
commit_email: ${{ secrets.AUR_EMAIL }}
|
||||
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
|
||||
commit_message: Update to version ${{ env.RELEASE_VERSION }}
|
||||
10
.gitignore
vendored
Normal file
10
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# Rust
|
||||
/target
|
||||
|
||||
# Editor
|
||||
/.idea
|
||||
/.vscode
|
||||
|
||||
# Nix
|
||||
/result
|
||||
/.direnv
|
||||
2506
Cargo.lock
generated
Normal file
2506
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
42
Cargo.toml
Normal file
42
Cargo.toml
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
[package]
|
||||
name = "crunchy-cli"
|
||||
authors = ["Crunchy Labs Maintainers"]
|
||||
version = "3.6.7"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
|
||||
[features]
|
||||
default = ["native-tls"]
|
||||
|
||||
rustls-tls = ["crunchy-cli-core/rustls-tls"]
|
||||
native-tls = ["crunchy-cli-core/native-tls"]
|
||||
openssl-tls = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls"]
|
||||
openssl-tls-static = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls-static"]
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1.38", features = ["macros", "rt-multi-thread", "time"], default-features = false }
|
||||
|
||||
native-tls-crate = { package = "native-tls", version = "0.2.12", optional = true }
|
||||
|
||||
crunchy-cli-core = { path = "./crunchy-cli-core" }
|
||||
|
||||
[build-dependencies]
|
||||
chrono = "0.4"
|
||||
clap = { version = "4.5", features = ["string"] }
|
||||
clap_complete = "4.5"
|
||||
clap_mangen = "0.2"
|
||||
|
||||
crunchy-cli-core = { path = "./crunchy-cli-core" }
|
||||
|
||||
[workspace]
|
||||
members = ["crunchy-cli-core"]
|
||||
|
||||
[patch.crates-io]
|
||||
# fork of the `native-tls` crate which can use openssl as backend on every platform. this is done as `reqwest` only
|
||||
# supports `rustls` and `native-tls` as tls backend
|
||||
native-tls = { git = "https://github.com/crunchy-labs/rust-not-so-native-tls.git", rev = "c7ac566" }
|
||||
|
||||
[profile.release]
|
||||
strip = true
|
||||
opt-level = "z"
|
||||
lto = true
|
||||
80
LICENSE
80
LICENSE
|
|
@ -1,61 +1,25 @@
|
|||
Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Copyright (c) 2023-NOW Crunchy Labs Team
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.
|
||||
0. Additional Definitions.
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License.
|
||||
|
||||
“The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.
|
||||
|
||||
An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.
|
||||
|
||||
A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”.
|
||||
|
||||
The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.
|
||||
|
||||
The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.
|
||||
1. Exception to Section 3 of the GNU GPL.
|
||||
|
||||
You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.
|
||||
2. Conveying Modified Versions.
|
||||
|
||||
If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:
|
||||
|
||||
a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or
|
||||
b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.
|
||||
|
||||
3. Object Code Incorporating Material from Library Header Files.
|
||||
|
||||
The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:
|
||||
|
||||
a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.
|
||||
b) Accompany the object code with a copy of the GNU GPL and this license document.
|
||||
|
||||
4. Combined Works.
|
||||
|
||||
You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:
|
||||
|
||||
a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.
|
||||
b) Accompany the Combined Work with a copy of the GNU GPL and this license document.
|
||||
c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.
|
||||
d) Do one of the following:
|
||||
0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.
|
||||
1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.
|
||||
e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)
|
||||
|
||||
5. Combined Libraries.
|
||||
|
||||
You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.
|
||||
b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.
|
||||
|
||||
6. Revised Versions of the GNU Lesser General Public License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.
|
||||
|
||||
If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
|
|
|
|||
17
Makefile
17
Makefile
|
|
@ -1,17 +0,0 @@
|
|||
VERSION=1.0.1
|
||||
BINARY_NAME=crunchy
|
||||
VERSION_BINARY_NAME=$(BINARY_NAME)-v$(VERSION)
|
||||
|
||||
build:
|
||||
cd cmd/crunchyroll-go && go build -o $(BINARY_NAME)
|
||||
mv cmd/crunchyroll-go/$(BINARY_NAME) .
|
||||
|
||||
test:
|
||||
go test -v .
|
||||
|
||||
release:
|
||||
cd cmd/crunchyroll-go && GOOS=linux GOARCH=amd64 go build -o $(VERSION_BINARY_NAME)_linux
|
||||
cd cmd/crunchyroll-go && GOOS=windows GOARCH=amd64 go build -o $(VERSION_BINARY_NAME)_windows.exe
|
||||
cd cmd/crunchyroll-go && GOOS=darwin GOARCH=amd64 go build -o $(VERSION_BINARY_NAME)_darwin
|
||||
|
||||
mv cmd/crunchyroll-go/$(VERSION_BINARY_NAME)_* .
|
||||
122
build.rs
Normal file
122
build.rs
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
use clap::{Command, CommandFactory};
|
||||
use clap_complete::shells;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
fn main() -> std::io::Result<()> {
|
||||
let rustls_tls = cfg!(feature = "rustls-tls");
|
||||
let native_tls = cfg!(feature = "native-tls");
|
||||
let openssl_tls = cfg!(any(feature = "openssl-tls", feature = "openssl-tls-static"));
|
||||
|
||||
if rustls_tls as u8 + native_tls as u8 + openssl_tls as u8 > 1 {
|
||||
let active_tls_backend = if openssl_tls {
|
||||
"openssl"
|
||||
} else if native_tls {
|
||||
"native tls"
|
||||
} else {
|
||||
"rustls"
|
||||
};
|
||||
|
||||
println!("cargo:warning=Multiple tls backends are activated (through the '*-tls' features). Consider to activate only one as it is not possible to change the backend during runtime. The active backend for this build will be '{}'.", active_tls_backend)
|
||||
}
|
||||
|
||||
// note that we're using an anti-pattern here / violate the rust conventions. build script are
|
||||
// not supposed to write outside of 'OUT_DIR'. to have the generated files in the build "root"
|
||||
// (the same directory where the output binary lives) is much simpler than in 'OUT_DIR' since
|
||||
// its nested in sub directories and is difficult to find (at least more difficult than in the
|
||||
// build root)
|
||||
let unconventional_out_dir =
|
||||
std::path::PathBuf::from(std::env::var_os("OUT_DIR").ok_or(std::io::ErrorKind::NotFound)?)
|
||||
.parent()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap()
|
||||
.to_path_buf();
|
||||
|
||||
let completions_dir = exist_or_create_dir(unconventional_out_dir.join("completions"))?;
|
||||
let manpage_dir = exist_or_create_dir(unconventional_out_dir.join("manpages"))?;
|
||||
|
||||
generate_completions(completions_dir)?;
|
||||
generate_manpages(manpage_dir)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn exist_or_create_dir(path: PathBuf) -> std::io::Result<PathBuf> {
|
||||
if !path.exists() {
|
||||
std::fs::create_dir(path.clone())?
|
||||
}
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
fn generate_completions(out_dir: PathBuf) -> std::io::Result<()> {
|
||||
let mut command: Command = crunchy_cli_core::Cli::command();
|
||||
|
||||
clap_complete::generate_to(
|
||||
shells::Bash,
|
||||
&mut command.clone(),
|
||||
"crunchy-cli",
|
||||
out_dir.clone(),
|
||||
)?;
|
||||
clap_complete::generate_to(
|
||||
shells::Elvish,
|
||||
&mut command.clone(),
|
||||
"crunchy-cli",
|
||||
out_dir.clone(),
|
||||
)?;
|
||||
println!(
|
||||
"{}",
|
||||
clap_complete::generate_to(
|
||||
shells::Fish,
|
||||
&mut command.clone(),
|
||||
"crunchy-cli",
|
||||
out_dir.clone(),
|
||||
)?
|
||||
.to_string_lossy()
|
||||
);
|
||||
clap_complete::generate_to(
|
||||
shells::PowerShell,
|
||||
&mut command.clone(),
|
||||
"crunchy-cli",
|
||||
out_dir.clone(),
|
||||
)?;
|
||||
clap_complete::generate_to(shells::Zsh, &mut command, "crunchy-cli", out_dir)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn generate_manpages(out_dir: PathBuf) -> std::io::Result<()> {
|
||||
fn generate_command_manpage(
|
||||
mut command: Command,
|
||||
base_path: &Path,
|
||||
sub_name: &str,
|
||||
) -> std::io::Result<()> {
|
||||
let (file_name, title) = if sub_name.is_empty() {
|
||||
command = command.name("crunchy-cli");
|
||||
("crunchy-cli.1".to_string(), "crunchy-cli".to_string())
|
||||
} else {
|
||||
command = command.name(format!("crunchy-cli {}", sub_name));
|
||||
(
|
||||
format!("crunchy-cli-{}.1", sub_name),
|
||||
format!("crunchy-cli-{}", sub_name),
|
||||
)
|
||||
};
|
||||
|
||||
let mut command_buf = vec![];
|
||||
let man = clap_mangen::Man::new(command)
|
||||
.title(title)
|
||||
.date(chrono::Utc::now().format("%b %d, %Y").to_string());
|
||||
man.render(&mut command_buf)?;
|
||||
|
||||
std::fs::write(base_path.join(file_name), command_buf)
|
||||
}
|
||||
|
||||
generate_command_manpage(crunchy_cli_core::Cli::command(), &out_dir, "")?;
|
||||
generate_command_manpage(crunchy_cli_core::Archive::command(), &out_dir, "archive")?;
|
||||
generate_command_manpage(crunchy_cli_core::Download::command(), &out_dir, "download")?;
|
||||
generate_command_manpage(crunchy_cli_core::Login::command(), &out_dir, "login")?;
|
||||
generate_command_manpage(crunchy_cli_core::Search::command(), &out_dir, "search")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -1,574 +0,0 @@
|
|||
package cmd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/ByteDream/crunchyroll-go"
|
||||
"github.com/ByteDream/crunchyroll-go/utils"
|
||||
"github.com/grafov/m3u8"
|
||||
"github.com/spf13/cobra"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
// sigusr1 is actually syscall.SIGUSR1, but because has no signal (or very less) it has to be defined manually
|
||||
var sigusr1 = syscall.Signal(0xa)
|
||||
|
||||
var (
|
||||
audioFlag string
|
||||
subtitleFlag string
|
||||
noHardsubFlag bool
|
||||
|
||||
directoryFlag string
|
||||
outputFlag string
|
||||
|
||||
resolutionFlag string
|
||||
|
||||
alternativeProgressFlag bool
|
||||
)
|
||||
|
||||
var cleanup [2]string
|
||||
|
||||
var getCmd = &cobra.Command{
|
||||
Use: "download",
|
||||
Short: "Download a video",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
loadCrunchy()
|
||||
download(args)
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(getCmd)
|
||||
getCmd.Flags().StringVar(&audioFlag, "audio", "", "The locale of the audio. Available locales: "+strings.Join(allLocalesAsStrings(), ", "))
|
||||
getCmd.Flags().StringVar(&subtitleFlag, "subtitle", "", "The locale of the subtitle. Available locales: "+strings.Join(allLocalesAsStrings(), ", "))
|
||||
getCmd.Flags().BoolVar(&noHardsubFlag, "no-hardsub", false, "Same as `--sub`, but the subtitles are not stored in the video itself, but in a separate file")
|
||||
|
||||
cwd, _ := os.Getwd()
|
||||
getCmd.Flags().StringVarP(&directoryFlag, "directory", "d", cwd, "The directory to download the file to")
|
||||
getCmd.Flags().StringVarP(&outputFlag, "output", "o", "{{.Title}}.ts", "Name of the output file\n"+
|
||||
"If you use the following things in the name, the will get replaced"+
|
||||
"\t{{.Title}} » Title of the video\n"+
|
||||
"\t{{.Resolution}} » Resolution of the video\n"+
|
||||
"\t{{.FPS}} » Frame Rate of the video\n"+
|
||||
"\t{{.Audio}} » Audio locale of the video\n"+
|
||||
"\t{{.Subtitle}} » Subtitle locale of the video\n")
|
||||
|
||||
getCmd.Flags().StringVarP(&resolutionFlag, "resolution", "r", "best", "res")
|
||||
|
||||
getCmd.Flags().BoolVar(&alternativeProgressFlag, "alternative-progress", false, "Shows an alternative, not so user-friendly progress instead of the progress bar")
|
||||
}
|
||||
|
||||
type information struct {
|
||||
Title string `json:"title"`
|
||||
OriginalURL string `json:"original_url"`
|
||||
DownloadURL string `json:"download_url"`
|
||||
Resolution string `json:"resolution"`
|
||||
FPS float64 `json:"fps"`
|
||||
Audio crunchyroll.LOCALE `json:"audio"`
|
||||
Subtitle crunchyroll.LOCALE `json:"subtitle"`
|
||||
Hardsub bool `json:"hardsub"`
|
||||
}
|
||||
|
||||
func download(urls []string) {
|
||||
if path.Ext(outputFlag) != ".ts" && !hasFFmpeg() {
|
||||
out.Fatalf("The file ending for the output file (%s) is not `.ts`. "+
|
||||
"Install ffmpeg (https://ffmpeg.org/download.html) use other media file endings (e.g. `.mp4`)\n", outputFlag)
|
||||
}
|
||||
|
||||
var allFormats []*crunchyroll.Format
|
||||
var allTitles []string
|
||||
var allURLs []string
|
||||
|
||||
for i, url := range urls {
|
||||
var failed bool
|
||||
|
||||
out.StartProgressf("Parsing url %d", i+1)
|
||||
if video, err1 := crunchy.FindVideo(url); err1 == nil {
|
||||
out.Debugf("Pre-parsed url %d as video\n", i+1)
|
||||
if formats, titles := parseVideo(video, url); formats != nil {
|
||||
allFormats = append(allFormats, formats...)
|
||||
allTitles = append(allTitles, titles...)
|
||||
for range formats {
|
||||
allURLs = append(allURLs, url)
|
||||
}
|
||||
} else {
|
||||
failed = true
|
||||
}
|
||||
} else if episodes, err2 := crunchy.FindEpisode(url); err2 == nil {
|
||||
out.Debugf("Parsed url %d as episode\n", i+1)
|
||||
out.Debugf("Found %d episode types\n", len(episodes))
|
||||
if format, title := parseEpisodes(episodes, url); format != nil {
|
||||
allFormats = append(allFormats, format)
|
||||
allTitles = append(allTitles, title)
|
||||
allURLs = append(allURLs, url)
|
||||
} else {
|
||||
failed = true
|
||||
}
|
||||
} else {
|
||||
out.EndProgressf(false, "Could not parse url %d, skipping\n", i+1)
|
||||
out.Debugf("Parse error 1: %s\n", err1)
|
||||
out.Debugf("Parse error 2: %s\n", err2)
|
||||
continue
|
||||
}
|
||||
|
||||
if !failed {
|
||||
out.EndProgressf(true, "Parsed url %d successful\n", i+1)
|
||||
} else {
|
||||
out.EndProgressf(false, "Failed to parse url %d (the url is valid but some kind of error which is surely shown caused the failure)", i+1)
|
||||
}
|
||||
}
|
||||
out.Debugf("%d of %d urls could be parsed\n", len(allURLs), len(urls))
|
||||
|
||||
out.Empty()
|
||||
if len(allFormats) == 0 {
|
||||
out.Fatalf("Nothing to download, aborting\n")
|
||||
}
|
||||
out.Infof("Downloads:")
|
||||
for i, format := range allFormats {
|
||||
video := format.Video
|
||||
out.Infof("\t%d. %s » %spx, %.2f FPS, %s audio\n", i+1, allTitles[i], video.Resolution, video.FrameRate, utils.LocaleLanguage(format.AudioLocale))
|
||||
}
|
||||
var tmpl *template.Template
|
||||
var err error
|
||||
tmpl, err = template.New("").Parse(outputFlag)
|
||||
if err == nil {
|
||||
var buff bytes.Buffer
|
||||
if err := tmpl.Execute(&buff, allFormats[0].Video); err == nil {
|
||||
if buff.String() == outputFlag {
|
||||
tmpl = nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if fileInfo, stat := os.Stat(directoryFlag); err == nil {
|
||||
if !fileInfo.IsDir() {
|
||||
out.Fatalf("%s (given from the `-d`/`--directory` flag) is not a directory\n", directoryFlag)
|
||||
}
|
||||
} else if os.IsNotExist(stat) {
|
||||
if err := os.MkdirAll(directoryFlag, 0777); err != nil {
|
||||
out.Fatalf("Failed to create directory which was given from the `-d`/`--directory` flag: %s\n", err)
|
||||
}
|
||||
} else {
|
||||
out.Fatalf("Failed to get information for via `-d`/`--directory` flag the given file / directory: %s", err)
|
||||
}
|
||||
|
||||
var success int
|
||||
for i, format := range allFormats {
|
||||
var subtitle crunchyroll.LOCALE
|
||||
if subtitleFlag != "" {
|
||||
subtitle = localeToLOCALE(subtitleFlag)
|
||||
}
|
||||
info := information{
|
||||
Title: allTitles[i],
|
||||
OriginalURL: allURLs[i],
|
||||
DownloadURL: format.Video.URI,
|
||||
Resolution: format.Video.Resolution,
|
||||
FPS: format.Video.FrameRate,
|
||||
Audio: format.AudioLocale,
|
||||
Subtitle: subtitle,
|
||||
}
|
||||
|
||||
if verboseFlag {
|
||||
fmtOptionsBytes, err := json.Marshal(info)
|
||||
if err != nil {
|
||||
fmtOptionsBytes = make([]byte, 0)
|
||||
}
|
||||
out.Debugf("Information (json): %s", string(fmtOptionsBytes))
|
||||
}
|
||||
|
||||
var baseFilename string
|
||||
if tmpl != nil {
|
||||
var buff bytes.Buffer
|
||||
if err := tmpl.Execute(&buff, info); err == nil {
|
||||
baseFilename = buff.String()
|
||||
} else {
|
||||
out.Fatalf("Could not convert filename (%s), aborting\n", err)
|
||||
}
|
||||
} else {
|
||||
baseFilename = outputFlag
|
||||
}
|
||||
|
||||
out.Empty()
|
||||
if downloadFormat(format, directoryFlag, baseFilename, info) {
|
||||
success++
|
||||
}
|
||||
}
|
||||
|
||||
out.Empty()
|
||||
out.Infof("Downloaded %d out of %d videos successful\n", success, len(allFormats))
|
||||
}
|
||||
|
||||
func parseVideo(video crunchyroll.Video, url string) (parsedFormats []*crunchyroll.Format, titles []string) {
|
||||
var rootTitle string
|
||||
var orderedFormats [][]*crunchyroll.Format
|
||||
var videoStructure utils.VideoStructure
|
||||
|
||||
switch video.(type) {
|
||||
case *crunchyroll.Series:
|
||||
out.Debugf("Parsed url as series\n")
|
||||
series := video.(*crunchyroll.Series)
|
||||
seasons, err := series.Seasons()
|
||||
if err != nil {
|
||||
out.Errf("Could not get any season of %s (%s): %s. Aborting\n", series.Title, url, err.Error())
|
||||
return
|
||||
}
|
||||
out.Debugf("Found %d seasons\n", len(seasons))
|
||||
seasonsStructure := utils.NewSeasonStructure(seasons)
|
||||
if err := seasonsStructure.InitAll(); err != nil {
|
||||
out.Errf("Failed to initialize %s (%s): %s. Aborting\n", series.Title, url, err.Error())
|
||||
return
|
||||
}
|
||||
out.Debugf("Initialized %s\n", series.Title)
|
||||
|
||||
rootTitle = series.Title
|
||||
orderedFormats, _ = seasonsStructure.OrderFormatsByEpisodeNumber()
|
||||
videoStructure = seasonsStructure.EpisodeStructure
|
||||
case *crunchyroll.Movie:
|
||||
out.Debugf("Parsed url as movie\n")
|
||||
movie := video.(*crunchyroll.Movie)
|
||||
movieListings, err := movie.MovieListing()
|
||||
if err != nil {
|
||||
out.Errf("Failed to get movie of %s (%s)\n", movie.Title, url)
|
||||
return
|
||||
}
|
||||
out.Debugf("Parsed %d movie listenings\n", len(movieListings))
|
||||
movieListingStructure := utils.NewMovieListingStructure(movieListings)
|
||||
if err := movieListingStructure.InitAll(); err != nil {
|
||||
out.Errf("Failed to initialize %s (%s): %s. Aborting\n", movie.Title, url, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
rootTitle = movie.Title
|
||||
unorderedFormats, _ := movieListingStructure.Formats()
|
||||
orderedFormats = append(orderedFormats, unorderedFormats)
|
||||
videoStructure = movieListingStructure
|
||||
}
|
||||
|
||||
// out.Debugf("Found %d formats\n", len(unorderedFormats))
|
||||
out.Debugf("Found %d different episodes\n", len(orderedFormats))
|
||||
|
||||
for j, formats := range orderedFormats {
|
||||
if format := findFormat(formats); format != nil {
|
||||
var title string
|
||||
switch videoStructure.(type) {
|
||||
case *utils.EpisodeStructure:
|
||||
episode, _ := videoStructure.(*utils.EpisodeStructure).GetEpisodeByFormat(format)
|
||||
title = episode.Title
|
||||
case *utils.MovieListingStructure:
|
||||
movieListing, _ := videoStructure.(*utils.MovieListingStructure).GetMovieListingByFormat(format)
|
||||
title = movieListing.Title
|
||||
}
|
||||
|
||||
parsedFormats = append(parsedFormats, format)
|
||||
titles = append(titles, title)
|
||||
out.Debugf("Successful parsed format %d for %s\n", j+1, rootTitle)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func parseEpisodes(episodes []*crunchyroll.Episode, url string) (*crunchyroll.Format, string) {
|
||||
episodeStructure := utils.NewEpisodeStructure(episodes)
|
||||
if err := episodeStructure.InitAll(); err != nil {
|
||||
out.EndProgressf(false, "Failed to initialize %s (%s): %s, skipping\n", episodes[0].Title, url, err)
|
||||
return nil, ""
|
||||
}
|
||||
|
||||
formats, _ := episodeStructure.Formats()
|
||||
out.Debugf("Found %d formats\n", len(formats))
|
||||
if format := findFormat(formats); format != nil {
|
||||
episode, _ := episodeStructure.GetEpisodeByFormat(format)
|
||||
return format, episode.Title
|
||||
}
|
||||
return nil, ""
|
||||
}
|
||||
|
||||
func findFormat(formats []*crunchyroll.Format) (format *crunchyroll.Format) {
|
||||
formatStructure := utils.NewFormatStructure(formats)
|
||||
var audioLocale, subtitleLocale crunchyroll.LOCALE
|
||||
|
||||
if audioFlag != "" {
|
||||
audioLocale = localeToLOCALE(audioFlag)
|
||||
} else {
|
||||
audioLocale = localeToLOCALE(systemLocale())
|
||||
}
|
||||
if subtitleFlag != "" {
|
||||
subtitleLocale = localeToLOCALE(subtitleFlag)
|
||||
}
|
||||
|
||||
if audioFlag == "" {
|
||||
var dubOk bool
|
||||
availableDub, _, _, _ := formatStructure.AvailableLocales(true)
|
||||
for _, dub := range availableDub {
|
||||
if dub == audioLocale {
|
||||
dubOk = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !dubOk {
|
||||
if audioFlag != systemLocale() {
|
||||
out.EndProgressf(false, "No stream with audio locale `%s` is available, skipping\n", audioLocale)
|
||||
return nil
|
||||
}
|
||||
out.Errf("No stream with default audio locale `%s` is available, using hardsubbed %s with subtitle locale %s\n", audioLocale, crunchyroll.JP, systemLocale())
|
||||
audioLocale = crunchyroll.JP
|
||||
if subtitleFlag == "" {
|
||||
subtitleLocale = localeToLOCALE(systemLocale())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var dubOk, subOk bool
|
||||
availableDub, availableSub, _, _ := formatStructure.AvailableLocales(true)
|
||||
for _, dub := range availableDub {
|
||||
if dub == audioLocale {
|
||||
dubOk = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !dubOk {
|
||||
if audioFlag == "" {
|
||||
audioLocale = crunchyroll.JP
|
||||
if subtitleFlag == "" {
|
||||
subtitleLocale = localeToLOCALE(systemLocale())
|
||||
out.Errf("No stream with default audio locale `%s` is available, using hardsubbed %s with subtitle locale %s\n", audioLocale, crunchyroll.JP, subtitleLocale)
|
||||
}
|
||||
}
|
||||
for _, dub := range availableDub {
|
||||
if dub == audioLocale {
|
||||
dubOk = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if subtitleLocale != "" {
|
||||
for _, sub := range availableSub {
|
||||
if sub == subtitleLocale {
|
||||
subOk = true
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
subOk = true
|
||||
}
|
||||
|
||||
if !dubOk {
|
||||
out.Errf("Could not find any video with `%s` audio locale\n", audioLocale)
|
||||
}
|
||||
if !subOk {
|
||||
out.Errf("Could not find any video with `%s` subtitle locale\n", subtitleLocale)
|
||||
}
|
||||
if !dubOk || !subOk {
|
||||
return nil
|
||||
}
|
||||
|
||||
formats, err := formatStructure.FilterFormatsByLocales(audioLocale, subtitleLocale, !noHardsubFlag)
|
||||
if err != nil {
|
||||
out.Errln("Failed to get matching format. Try to change the `--audio` or `--subtitle` flag")
|
||||
return
|
||||
}
|
||||
|
||||
if resolutionFlag == "best" || resolutionFlag == "" {
|
||||
sort.Sort(sort.Reverse(utils.FormatsByResolution(formats)))
|
||||
format = formats[0]
|
||||
} else if resolutionFlag == "worst" {
|
||||
sort.Sort(utils.FormatsByResolution(formats))
|
||||
format = formats[0]
|
||||
} else {
|
||||
for _, f := range formats {
|
||||
if f.Video.Resolution == resolutionFlag {
|
||||
format = f
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
subtitleFlag = string(subtitleLocale)
|
||||
return
|
||||
}
|
||||
|
||||
func downloadFormat(format *crunchyroll.Format, dir, fname string, info information) bool {
|
||||
filename := freeFileName(filepath.Join(dir, fname))
|
||||
ext := path.Ext(filename)
|
||||
out.Debugf("Download filename: %s\n", filename)
|
||||
if filename != filepath.Join(dir, fname) {
|
||||
out.Errf("The file %s already exist, renaming the download file to %s\n", filepath.Join(dir, fname), filename)
|
||||
}
|
||||
if ext != ".ts" {
|
||||
if !hasFFmpeg() {
|
||||
out.Fatalf("The file ending for the output file (%s) is not `.ts`. "+
|
||||
"Install ffmpeg (https://ffmpeg.org/download.html) use other media file endings (e.g. `.mp4`)\n", filename)
|
||||
}
|
||||
out.Debugf("File will be converted via ffmpeg")
|
||||
}
|
||||
var subtitleFilename string
|
||||
if noHardsubFlag {
|
||||
subtitle, ok := utils.SubtitleByLocale(format, info.Subtitle)
|
||||
if !ok {
|
||||
out.Errf("Failed to get %s subtitles\n", info.Subtitle)
|
||||
return false
|
||||
}
|
||||
subtitleFilename = freeFileName(filepath.Join(dir, fmt.Sprintf("%s.%s", strings.TrimRight(path.Base(filename), ext), subtitle.Format)))
|
||||
out.Debugf("Subtitles will be saved as `%s`\n", subtitleFilename)
|
||||
}
|
||||
|
||||
out.Infof("Downloading `%s` (%s) as `%s`\n", info.Title, info.OriginalURL, filename)
|
||||
out.Infof("Audio: %s\n", info.Audio)
|
||||
out.Infof("Subtitle: %s\n", info.Subtitle)
|
||||
out.Infof("Hardsub: %v\n", format.Hardsub != "")
|
||||
out.Infof("Resolution: %s\n", info.Resolution)
|
||||
out.Infof("FPS: %.2f\n", info.FPS)
|
||||
|
||||
var err error
|
||||
if ext == ".ts" {
|
||||
file, err := os.Create(filename)
|
||||
defer file.Close()
|
||||
if err != nil {
|
||||
out.Errf("Could not create file `%s` to download episode `%s` (%s): %s, skipping\n", filename, info.Title, info.OriginalURL, err)
|
||||
return false
|
||||
}
|
||||
cleanup[0] = filename
|
||||
|
||||
// removes all files in case of an unexpected exit
|
||||
sigs := make(chan os.Signal)
|
||||
signal.Notify(sigs, os.Interrupt, syscall.SIGTERM, sigusr1)
|
||||
go func() {
|
||||
sig := <-sigs
|
||||
os.RemoveAll(cleanup[1])
|
||||
switch sig {
|
||||
case os.Interrupt, syscall.SIGTERM:
|
||||
os.Remove(cleanup[0])
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
|
||||
err = format.Download(file, downloadProgress)
|
||||
// newline to avoid weird output
|
||||
fmt.Println()
|
||||
|
||||
// make the goroutine stop
|
||||
sigs <- sigusr1
|
||||
} else {
|
||||
tempDir, err := os.MkdirTemp("", "crunchy_")
|
||||
if err != nil {
|
||||
out.Errln("Failed to create temp download dir. Skipping")
|
||||
return false
|
||||
}
|
||||
sigs := make(chan os.Signal, 1)
|
||||
signal.Notify(sigs, os.Interrupt, syscall.SIGTERM, sigusr1)
|
||||
go func() {
|
||||
sig := <-sigs
|
||||
os.RemoveAll(tempDir)
|
||||
switch sig {
|
||||
case os.Interrupt, syscall.SIGTERM:
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
|
||||
var segmentCount int
|
||||
err = format.DownloadSegments(tempDir, 4, func(segment *m3u8.MediaSegment, current, total int, file *os.File, err error) error {
|
||||
segmentCount++
|
||||
return downloadProgress(segment, current, total, file, err)
|
||||
})
|
||||
// newline to avoid weird output
|
||||
fmt.Println()
|
||||
|
||||
f, _ := os.CreateTemp("", "*.txt")
|
||||
for i := 0; i < segmentCount; i++ {
|
||||
fmt.Fprintf(f, "file '%s.ts'\n", filepath.Join(tempDir, strconv.Itoa(i)))
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
f.Close()
|
||||
|
||||
cmd := exec.Command("ffmpeg",
|
||||
"-f", "concat",
|
||||
"-safe", "0",
|
||||
"-i", f.Name(),
|
||||
"-c", "copy",
|
||||
filename)
|
||||
err = cmd.Run()
|
||||
|
||||
sigs <- sigusr1
|
||||
}
|
||||
if err != nil {
|
||||
out.Errln("Failed to download video, skipping")
|
||||
} else {
|
||||
if info.Subtitle == "" {
|
||||
out.Infof("Downloaded `%s` as `%s` with %s audio locale\n", info.Title, filename, strings.ToLower(utils.LocaleLanguage(info.Audio)))
|
||||
} else {
|
||||
out.Infof("Downloaded `%s` as `%s` with %s audio locale and %s subtitle locale\n", info.Title, filename, strings.ToLower(utils.LocaleLanguage(info.Audio)), strings.ToLower(utils.LocaleLanguage(info.Subtitle)))
|
||||
if subtitleFilename != "" {
|
||||
file, err := os.Create(subtitleFilename)
|
||||
if err != nil {
|
||||
out.Errf("Failed to download subtitles: %s\n", err)
|
||||
return false
|
||||
} else {
|
||||
subtitle, ok := utils.SubtitleByLocale(format, info.Subtitle)
|
||||
if !ok {
|
||||
out.Errf("Failed to get %s subtitles\n", info.Subtitle)
|
||||
return false
|
||||
}
|
||||
if err := subtitle.Download(file); err != nil {
|
||||
out.Errf("Failed to download subtitles: %s\n", err)
|
||||
return false
|
||||
}
|
||||
out.Infof("Downloaded `%s` subtitles to `%s`\n", info.Subtitle, subtitleFilename)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func downloadProgress(segment *m3u8.MediaSegment, current, total int, file *os.File, err error) error {
|
||||
if cleanup[1] == "" && file != nil {
|
||||
cleanup[1] = path.Dir(file.Name())
|
||||
}
|
||||
|
||||
if !quietFlag {
|
||||
percentage := float32(current) / float32(total) * 100
|
||||
if alternativeProgressFlag {
|
||||
out.Infof("Downloading %d/%d (%.2f%%) » %s", current, total, percentage, segment.URI)
|
||||
} else {
|
||||
progressWidth := float32(terminalWidth() - (14 + len(out.InfoLog.Prefix())) - (len(fmt.Sprint(total)))*2)
|
||||
|
||||
repeatCount := int(percentage / (float32(100) / progressWidth))
|
||||
// it can be lower than zero when the terminal is very tiny
|
||||
if repeatCount < 0 {
|
||||
repeatCount = 0
|
||||
}
|
||||
|
||||
// alternative:
|
||||
// progressPercentage := strings.Repeat("█", repeatCount)
|
||||
progressPercentage := (strings.Repeat("=", repeatCount) + ">")[1:]
|
||||
|
||||
fmt.Printf("\r%s[%-"+fmt.Sprint(progressWidth)+"s]%4d%% %8d/%d", out.InfoLog.Prefix(), progressPercentage, int(percentage), current, total)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func freeFileName(filename string) string {
|
||||
ext := path.Ext(filename)
|
||||
base := strings.TrimRight(filename, ext)
|
||||
for j := 0; ; j++ {
|
||||
if _, stat := os.Stat(filename); stat != nil && !os.IsExist(stat) {
|
||||
break
|
||||
}
|
||||
filename = fmt.Sprintf("%s (%d)%s", base, j, ext)
|
||||
}
|
||||
return filename
|
||||
}
|
||||
|
|
@ -1,50 +0,0 @@
|
|||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/ByteDream/crunchyroll-go"
|
||||
"github.com/spf13/cobra"
|
||||
"io/ioutil"
|
||||
)
|
||||
|
||||
var (
|
||||
sessionIDFlag bool
|
||||
)
|
||||
|
||||
var loginCmd = &cobra.Command{
|
||||
Use: "login",
|
||||
Short: "Login to crunchyroll",
|
||||
Args: cobra.RangeArgs(1, 2),
|
||||
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
if sessionIDFlag {
|
||||
return loginSessionID(args[0], false)
|
||||
} else {
|
||||
return loginCredentials(args[0], args[1])
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(loginCmd)
|
||||
loginCmd.Flags().BoolVar(&sessionIDFlag, "session-id", false, "session id")
|
||||
}
|
||||
|
||||
func loginCredentials(email, password string) error {
|
||||
out.Debugln("Logging in via credentials")
|
||||
session, err := crunchyroll.LoginWithCredentials(email, password, locale, client)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return loginSessionID(session.SessionID, true)
|
||||
}
|
||||
|
||||
func loginSessionID(sessionID string, alreadyChecked bool) error {
|
||||
if !alreadyChecked {
|
||||
out.Debugln("Logging in via session id")
|
||||
if _, err := crunchyroll.LoginWithSessionID(sessionID, locale, client); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
out.Infoln("Due to security reasons, you have to login again on the next reboot")
|
||||
return ioutil.WriteFile(sessionIDPath, []byte(sessionID), 0777)
|
||||
}
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/ByteDream/crunchyroll-go"
|
||||
"github.com/spf13/cobra"
|
||||
"net/http"
|
||||
"os"
|
||||
"runtime"
|
||||
"runtime/debug"
|
||||
)
|
||||
|
||||
var (
|
||||
client *http.Client
|
||||
locale crunchyroll.LOCALE
|
||||
crunchy *crunchyroll.Crunchyroll
|
||||
out = newLogger(false, true, true, colorFlag)
|
||||
|
||||
quietFlag bool
|
||||
verboseFlag bool
|
||||
proxyFlag string
|
||||
localeFlag string
|
||||
colorFlag bool
|
||||
)
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
Use: "crunchyroll",
|
||||
Short: "Download crunchyroll videos with ease",
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) (err error) {
|
||||
if verboseFlag {
|
||||
out = newLogger(true, true, true, colorFlag)
|
||||
} else if quietFlag {
|
||||
out = newLogger(false, false, false, false)
|
||||
}
|
||||
|
||||
out.DebugLog.Printf("Executing `%s` command with %d arg(s)\n", cmd.Name(), len(args))
|
||||
|
||||
locale = localeToLOCALE(localeFlag)
|
||||
|
||||
client, err = createOrDefaultClient(proxyFlag)
|
||||
return
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.PersistentFlags().BoolVarP(&quietFlag, "quiet", "q", false, "Disable all output")
|
||||
rootCmd.PersistentFlags().BoolVarP(&verboseFlag, "verbose", "v", false, "Adds debug messages to the normal output")
|
||||
rootCmd.PersistentFlags().StringVarP(&proxyFlag, "proxy", "p", "", "Proxy to use")
|
||||
rootCmd.PersistentFlags().StringVarP(&localeFlag, "locale", "l", systemLocale(), "The locale to use")
|
||||
rootCmd.PersistentFlags().BoolVar(&colorFlag, "color", false, "Colored output. Only available on not windows systems")
|
||||
}
|
||||
|
||||
func Execute() {
|
||||
rootCmd.CompletionOptions.DisableDefaultCmd = true
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
out.Errln(r)
|
||||
// change color to red
|
||||
if colorFlag && runtime.GOOS != "windows" {
|
||||
out.ErrLog.SetOutput(&loggerWriter{original: out.ErrLog.Writer(), color: "\033[31m"})
|
||||
}
|
||||
out.Debugln(string(debug.Stack()))
|
||||
os.Exit(2)
|
||||
}
|
||||
}()
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
out.Fatalln(err)
|
||||
}
|
||||
}
|
||||
|
|
@ -1,310 +0,0 @@
|
|||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/ByteDream/crunchyroll-go"
|
||||
"github.com/ByteDream/crunchyroll-go/utils"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
var sessionIDPath = filepath.Join(os.TempDir(), ".crunchy")
|
||||
|
||||
type progress struct {
|
||||
status bool
|
||||
message string
|
||||
}
|
||||
|
||||
type logger struct {
|
||||
DebugLog *log.Logger
|
||||
InfoLog *log.Logger
|
||||
ErrLog *log.Logger
|
||||
|
||||
devView bool
|
||||
|
||||
progressWG sync.Mutex
|
||||
progress chan progress
|
||||
}
|
||||
|
||||
func newLogger(debug, info, err bool, color bool) *logger {
|
||||
debugLog, infoLog, errLog := log.New(io.Discard, "=> ", 0), log.New(io.Discard, "=> ", 0), log.New(io.Discard, "=> ", 0)
|
||||
|
||||
debugColor, infoColor, errColor := "", "", ""
|
||||
if color && runtime.GOOS != "windows" {
|
||||
debugColor, infoColor, errColor = "\033[95m", "\033[96m", "\033[31m"
|
||||
}
|
||||
|
||||
if debug {
|
||||
debugLog.SetOutput(&loggerWriter{original: os.Stdout, color: debugColor})
|
||||
}
|
||||
if info {
|
||||
infoLog.SetOutput(&loggerWriter{original: os.Stdout, color: infoColor})
|
||||
}
|
||||
if err {
|
||||
errLog.SetOutput(&loggerWriter{original: os.Stdout, color: errColor})
|
||||
}
|
||||
|
||||
if debug {
|
||||
debugLog = log.New(debugLog.Writer(), "[debug] ", 0)
|
||||
infoLog = log.New(infoLog.Writer(), "[info] ", 0)
|
||||
errLog = log.New(errLog.Writer(), "[err] ", 0)
|
||||
}
|
||||
|
||||
return &logger{
|
||||
DebugLog: debugLog,
|
||||
InfoLog: infoLog,
|
||||
ErrLog: errLog,
|
||||
|
||||
devView: debug,
|
||||
}
|
||||
}
|
||||
|
||||
func (l *logger) Empty() {
|
||||
if !l.devView && l.InfoLog.Writer() != io.Discard {
|
||||
fmt.Println()
|
||||
}
|
||||
}
|
||||
|
||||
func (l *logger) StartProgress(message string) {
|
||||
if l.devView {
|
||||
l.InfoLog.Println(message)
|
||||
return
|
||||
}
|
||||
l.progress = make(chan progress)
|
||||
|
||||
go func() {
|
||||
states := []string{"-", "\\", "|", "/"}
|
||||
for i := 0; ; i++ {
|
||||
l.progressWG.Lock()
|
||||
select {
|
||||
case p := <-l.progress:
|
||||
// clearing the last line
|
||||
fmt.Printf("\r%s\r", strings.Repeat(" ", len(l.InfoLog.Prefix())+len(message)+2))
|
||||
if p.status {
|
||||
successTag := "✔"
|
||||
if runtime.GOOS == "windows" {
|
||||
successTag = "~"
|
||||
}
|
||||
l.InfoLog.Printf("%s %s", successTag, p.message)
|
||||
} else {
|
||||
errorTag := "✘"
|
||||
if runtime.GOOS == "windows" {
|
||||
errorTag = "!"
|
||||
}
|
||||
l.ErrLog.Printf("%s %s", errorTag, p.message)
|
||||
}
|
||||
l.progress = nil
|
||||
l.progressWG.Unlock()
|
||||
return
|
||||
default:
|
||||
if i%10 == 0 {
|
||||
fmt.Printf("\r%s%s %s", l.InfoLog.Prefix(), states[i/10%4], message)
|
||||
}
|
||||
time.Sleep(35 * time.Millisecond)
|
||||
l.progressWG.Unlock()
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (l *logger) StartProgressf(message string, a ...interface{}) {
|
||||
l.StartProgress(fmt.Sprintf(message, a...))
|
||||
}
|
||||
|
||||
func (l *logger) EndProgress(successful bool, message string) {
|
||||
if l.devView {
|
||||
if successful {
|
||||
l.InfoLog.Print(message)
|
||||
} else {
|
||||
l.ErrLog.Print(message)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
l.progress <- progress{
|
||||
status: successful,
|
||||
message: message,
|
||||
}
|
||||
}
|
||||
|
||||
func (l *logger) EndProgressf(successful bool, message string, a ...interface{}) {
|
||||
l.EndProgress(successful, fmt.Sprintf(message, a...))
|
||||
}
|
||||
|
||||
func (l *logger) Debugln(v ...interface{}) {
|
||||
l.print(0, v...)
|
||||
}
|
||||
|
||||
func (l *logger) Debugf(message string, a ...interface{}) {
|
||||
l.print(0, fmt.Sprintf(message, a...))
|
||||
}
|
||||
|
||||
func (l *logger) Infoln(v ...interface{}) {
|
||||
l.print(1, v...)
|
||||
}
|
||||
|
||||
func (l *logger) Infof(message string, a ...interface{}) {
|
||||
l.print(1, fmt.Sprintf(message, a...))
|
||||
}
|
||||
|
||||
func (l *logger) Errln(v ...interface{}) {
|
||||
l.print(2, v...)
|
||||
}
|
||||
|
||||
func (l *logger) Errf(message string, a ...interface{}) {
|
||||
l.print(2, fmt.Sprintf(message, a...))
|
||||
}
|
||||
|
||||
func (l *logger) Fatalln(v ...interface{}) {
|
||||
l.print(2, v...)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func (l *logger) Fatalf(message string, a ...interface{}) {
|
||||
l.print(2, fmt.Sprintf(message, a...))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func (l *logger) print(level int, v ...interface{}) {
|
||||
if l.progress != nil {
|
||||
l.progressWG.Lock()
|
||||
defer l.progressWG.Unlock()
|
||||
fmt.Print("\r")
|
||||
}
|
||||
|
||||
switch level {
|
||||
case 0:
|
||||
l.DebugLog.Print(v...)
|
||||
case 1:
|
||||
l.InfoLog.Print(v...)
|
||||
case 2:
|
||||
l.ErrLog.Print(v...)
|
||||
}
|
||||
}
|
||||
|
||||
type loggerWriter struct {
|
||||
io.Writer
|
||||
|
||||
original io.Writer
|
||||
color string
|
||||
}
|
||||
|
||||
func (lw *loggerWriter) Write(p []byte) (n int, err error) {
|
||||
if lw.color != "" {
|
||||
p = append([]byte(lw.color), p...)
|
||||
p = append(p, []byte("\033[0m")...)
|
||||
}
|
||||
return lw.original.Write(p)
|
||||
}
|
||||
|
||||
// systemLocale receives the system locale
|
||||
// https://stackoverflow.com/questions/51829386/golang-get-system-language/51831590#51831590
|
||||
func systemLocale() string {
|
||||
if runtime.GOOS != "windows" {
|
||||
if lang, ok := os.LookupEnv("LANG"); ok {
|
||||
return strings.ReplaceAll(strings.Split(lang, ".")[0], "_", "-")
|
||||
}
|
||||
} else {
|
||||
cmd := exec.Command("powershell", "Get-Culture | select -exp Name")
|
||||
if output, err := cmd.Output(); err != nil {
|
||||
return strings.Trim(string(output), "\r\n")
|
||||
}
|
||||
}
|
||||
return "en-US"
|
||||
}
|
||||
|
||||
func localeToLOCALE(locale string) crunchyroll.LOCALE {
|
||||
if l := crunchyroll.LOCALE(locale); utils.ValidateLocale(l) {
|
||||
return l
|
||||
} else {
|
||||
out.Errf("%s is not a supported locale, using %s as fallback\n", locale, crunchyroll.US)
|
||||
return crunchyroll.US
|
||||
}
|
||||
}
|
||||
|
||||
func allLocalesAsStrings() (locales []string) {
|
||||
for _, locale := range utils.AllLocales {
|
||||
locales = append(locales, string(locale))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func createOrDefaultClient(proxy string) (*http.Client, error) {
|
||||
if proxy == "" {
|
||||
return http.DefaultClient, nil
|
||||
} else {
|
||||
out.Infof("Using custom proxy %s\n", proxy)
|
||||
proxyURL, err := url.Parse(proxy)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
client := &http.Client{
|
||||
Transport: &http.Transport{
|
||||
DisableCompression: true,
|
||||
Proxy: http.ProxyURL(proxyURL),
|
||||
},
|
||||
Timeout: 30 * time.Second,
|
||||
}
|
||||
return client, nil
|
||||
}
|
||||
}
|
||||
|
||||
func loadSessionID() (string, error) {
|
||||
if _, stat := os.Stat(sessionIDPath); os.IsNotExist(stat) {
|
||||
out.Fatalf("To use this command, login first. Type `%s login -h` to get help\n", os.Args[0])
|
||||
}
|
||||
body, err := ioutil.ReadFile(sessionIDPath)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return strings.ReplaceAll(string(body), "\n", ""), nil
|
||||
}
|
||||
|
||||
func loadCrunchy() {
|
||||
out.StartProgress("Logging in")
|
||||
sessionID, err := loadSessionID()
|
||||
if err == nil {
|
||||
if crunchy, err = crunchyroll.LoginWithSessionID(sessionID, locale, client); err != nil {
|
||||
out.EndProgress(false, err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
} else {
|
||||
out.EndProgress(false, err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
out.EndProgress(true, "Logged in")
|
||||
out.Debugf("Logged in with session id %s\n", sessionID)
|
||||
}
|
||||
|
||||
func hasFFmpeg() bool {
|
||||
cmd := exec.Command("ffmpeg", "-h")
|
||||
return cmd.Run() == nil
|
||||
}
|
||||
|
||||
func terminalWidth() int {
|
||||
if runtime.GOOS != "windows" {
|
||||
cmd := exec.Command("stty", "size")
|
||||
cmd.Stdin = os.Stdin
|
||||
out, err := cmd.Output()
|
||||
if err != nil {
|
||||
return 60
|
||||
}
|
||||
width, err := strconv.Atoi(strings.Split(strings.ReplaceAll(string(out), "\n", ""), " ")[1])
|
||||
if err != nil {
|
||||
return 60
|
||||
}
|
||||
return width
|
||||
}
|
||||
return 60
|
||||
}
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
package main
|
||||
|
||||
// the cli will be redesigned soon
|
||||
|
||||
import (
|
||||
"github.com/ByteDream/crunchyroll-go/cmd/crunchyroll-go/cmd"
|
||||
)
|
||||
|
||||
func main() {
|
||||
cmd.Execute()
|
||||
}
|
||||
51
crunchy-cli-core/Cargo.toml
Normal file
51
crunchy-cli-core/Cargo.toml
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
[package]
|
||||
name = "crunchy-cli-core"
|
||||
authors = ["Crunchy Labs Maintainers"]
|
||||
version = "3.6.7"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
|
||||
[features]
|
||||
rustls-tls = ["reqwest/rustls-tls"]
|
||||
native-tls = ["reqwest/native-tls", "reqwest/native-tls-alpn"]
|
||||
openssl-tls = ["reqwest/native-tls", "reqwest/native-tls-alpn", "dep:rustls-native-certs"]
|
||||
openssl-tls-static = ["reqwest/native-tls", "reqwest/native-tls-alpn", "reqwest/native-tls-vendored", "dep:rustls-native-certs"]
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
async-speed-limit = "0.4"
|
||||
clap = { version = "4.5", features = ["derive", "string"] }
|
||||
chrono = "0.4"
|
||||
crunchyroll-rs = { version = "0.11.4", features = ["experimental-stabilizations", "tower"] }
|
||||
ctrlc = "3.4"
|
||||
dialoguer = { version = "0.11", default-features = false }
|
||||
dirs = "5.0"
|
||||
derive_setters = "0.1"
|
||||
futures-util = { version = "0.3", features = ["io"] }
|
||||
fs2 = "0.4"
|
||||
http = "1.1"
|
||||
indicatif = "0.17"
|
||||
lazy_static = "1.4"
|
||||
log = { version = "0.4", features = ["std"] }
|
||||
num_cpus = "1.16"
|
||||
regex = "1.10"
|
||||
reqwest = { version = "0.12", features = ["socks", "stream"] }
|
||||
rsubs-lib = "~0.3.2"
|
||||
rusty-chromaprint = "0.2"
|
||||
serde = "1.0"
|
||||
serde_json = "1.0"
|
||||
serde_plain = "1.0"
|
||||
shlex = "1.3"
|
||||
sys-locale = "0.3"
|
||||
tempfile = "3.10"
|
||||
time = "0.3"
|
||||
tokio = { version = "1.38", features = ["io-util", "macros", "net", "rt-multi-thread", "time"] }
|
||||
tokio-util = "0.7"
|
||||
tower-service = "0.3"
|
||||
rustls-native-certs = { version = "0.7", optional = true }
|
||||
|
||||
[target.'cfg(not(target_os = "windows"))'.dependencies]
|
||||
nix = { version = "0.28", features = ["fs"] }
|
||||
|
||||
[build-dependencies]
|
||||
chrono = "0.4"
|
||||
34
crunchy-cli-core/build.rs
Normal file
34
crunchy-cli-core/build.rs
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
fn main() -> std::io::Result<()> {
|
||||
println!(
|
||||
"cargo:rustc-env=GIT_HASH={}",
|
||||
std::env::var("CRUNCHY_CLI_GIT_HASH")
|
||||
.or::<std::io::Error>(Ok(get_short_commit_hash()?.unwrap_or_default()))?
|
||||
);
|
||||
println!(
|
||||
"cargo:rustc-env=BUILD_DATE={}",
|
||||
chrono::Utc::now().format("%F")
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_short_commit_hash() -> std::io::Result<Option<String>> {
|
||||
let git = std::process::Command::new("git")
|
||||
.arg("rev-parse")
|
||||
.arg("--short")
|
||||
.arg("HEAD")
|
||||
.output();
|
||||
|
||||
match git {
|
||||
Ok(cmd) => Ok(Some(
|
||||
String::from_utf8_lossy(cmd.stdout.as_slice()).to_string(),
|
||||
)),
|
||||
Err(e) => {
|
||||
if e.kind() != std::io::ErrorKind::NotFound {
|
||||
Err(e)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
692
crunchy-cli-core/src/archive/command.rs
Normal file
692
crunchy-cli-core/src/archive/command.rs
Normal file
|
|
@ -0,0 +1,692 @@
|
|||
use crate::utils::context::Context;
|
||||
use crate::utils::download::{
|
||||
DownloadBuilder, DownloadFormat, DownloadFormatMetadata, MergeBehavior,
|
||||
};
|
||||
use crate::utils::ffmpeg::FFmpegPreset;
|
||||
use crate::utils::filter::{Filter, FilterMediaScope};
|
||||
use crate::utils::format::{Format, SingleFormat};
|
||||
use crate::utils::locale::{all_locale_in_locales, resolve_locales, LanguageTagging};
|
||||
use crate::utils::log::progress;
|
||||
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
|
||||
use crate::utils::parse::parse_url;
|
||||
use crate::utils::video::stream_data_from_stream;
|
||||
use crate::Execute;
|
||||
use anyhow::bail;
|
||||
use anyhow::Result;
|
||||
use chrono::Duration;
|
||||
use crunchyroll_rs::media::{Resolution, Subtitle};
|
||||
use crunchyroll_rs::Locale;
|
||||
use log::{debug, warn};
|
||||
use regex::Regex;
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::iter::zip;
|
||||
use std::ops::Sub;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
#[derive(Clone, Debug, clap::Parser)]
|
||||
#[clap(about = "Archive a video")]
|
||||
#[command(arg_required_else_help(true))]
|
||||
pub struct Archive {
|
||||
#[arg(help = format!("Audio languages. Can be used multiple times. \
|
||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||
#[arg(long_help = format!("Audio languages. Can be used multiple times. \
|
||||
Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||
#[arg(short, long, default_values_t = vec![Locale::ja_JP, crate::utils::locale::system_locale()])]
|
||||
pub(crate) audio: Vec<Locale>,
|
||||
#[arg(skip)]
|
||||
output_audio_locales: Vec<String>,
|
||||
#[arg(help = format!("Subtitle languages. Can be used multiple times. \
|
||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||
#[arg(long_help = format!("Subtitle languages. Can be used multiple times. \
|
||||
Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||
#[arg(short, long, default_values_t = Locale::all())]
|
||||
pub(crate) subtitle: Vec<Locale>,
|
||||
#[arg(skip)]
|
||||
output_subtitle_locales: Vec<String>,
|
||||
|
||||
#[arg(help = "Name of the output file")]
|
||||
#[arg(long_help = "Name of the output file. \
|
||||
If you use one of the following pattern they will get replaced:\n \
|
||||
{title} → Title of the video\n \
|
||||
{series_name} → Name of the series\n \
|
||||
{season_name} → Name of the season\n \
|
||||
{audio} → Audio language of the video\n \
|
||||
{width} → Width of the video\n \
|
||||
{height} → Height of the video\n \
|
||||
{season_number} → Number of the season\n \
|
||||
{episode_number} → Number of the episode\n \
|
||||
{relative_episode_number} → Number of the episode relative to its season\n \
|
||||
{sequence_number} → Like '{episode_number}' but without possible non-number characters\n \
|
||||
{relative_sequence_number} → Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
|
||||
{release_year} → Release year of the video\n \
|
||||
{release_month} → Release month of the video\n \
|
||||
{release_day} → Release day of the video\n \
|
||||
{series_id} → ID of the series\n \
|
||||
{season_id} → ID of the season\n \
|
||||
{episode_id} → ID of the episode")]
|
||||
#[arg(short, long, default_value = "{title}.mkv")]
|
||||
pub(crate) output: String,
|
||||
#[arg(help = "Name of the output file if the episode is a special")]
|
||||
#[arg(long_help = "Name of the output file if the episode is a special. \
|
||||
If not set, the '-o'/'--output' flag will be used as name template")]
|
||||
#[arg(long)]
|
||||
pub(crate) output_specials: Option<String>,
|
||||
|
||||
#[arg(help = "Sanitize the output file for use with all operating systems. \
|
||||
This option only affects template options and not static characters.")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) universal_output: bool,
|
||||
|
||||
#[arg(help = "Video resolution")]
|
||||
#[arg(long_help = "The video resolution. \
|
||||
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
||||
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
||||
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
||||
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
|
||||
#[arg(short, long, default_value = "best")]
|
||||
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
|
||||
pub(crate) resolution: Resolution,
|
||||
|
||||
#[arg(
|
||||
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'sync', 'audio' and 'video'"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "Because of local restrictions (or other reasons) some episodes with different languages does not have the same length (e.g. when some scenes were cut out). \
|
||||
With this flag you can set the behavior when handling multiple language.
|
||||
Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language), 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio') and 'sync' (detects if videos differ in length: if so, tries to find the offset of matching audio parts and removes it from the beginning, otherwise it behaves like 'audio')"
|
||||
)]
|
||||
#[arg(short, long, default_value = "auto")]
|
||||
#[arg(value_parser = MergeBehavior::parse)]
|
||||
pub(crate) merge: MergeBehavior,
|
||||
#[arg(
|
||||
help = "If the merge behavior is 'auto' or 'sync', consider videos to be of equal lengths if the difference in length is smaller than the specified milliseconds"
|
||||
)]
|
||||
#[arg(long, default_value_t = 200)]
|
||||
pub(crate) merge_time_tolerance: u32,
|
||||
#[arg(
|
||||
help = "If the merge behavior is 'sync', specify the difference by which two fingerprints are considered equal, higher values can help when the algorithm fails"
|
||||
)]
|
||||
#[arg(long, default_value_t = 6)]
|
||||
pub(crate) merge_sync_tolerance: u32,
|
||||
#[arg(
|
||||
help = "If the merge behavior is 'sync', specify the amount of offset determination runs from which the final offset is calculated, higher values will increase the time required but lead to more precise offsets"
|
||||
)]
|
||||
#[arg(long, default_value_t = 4)]
|
||||
pub(crate) merge_sync_precision: u32,
|
||||
|
||||
#[arg(
|
||||
help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run in issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' are both resolving to 'es')"
|
||||
)]
|
||||
#[arg(long)]
|
||||
#[arg(value_parser = LanguageTagging::parse)]
|
||||
pub(crate) language_tagging: Option<LanguageTagging>,
|
||||
|
||||
#[arg(help = format!("Presets for converting the video to a specific coding format. \
|
||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||
#[arg(long_help = format!("Presets for converting the video to a specific coding format. \
|
||||
If you need more specific ffmpeg customizations you can pass ffmpeg output arguments instead of a preset as value. \
|
||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||
#[arg(long)]
|
||||
#[arg(value_parser = FFmpegPreset::parse)]
|
||||
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
|
||||
#[arg(
|
||||
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "The number of threads used by ffmpeg to generate the output file. \
|
||||
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
|
||||
By default, ffmpeg chooses the thread count which works best for the output codec"
|
||||
)]
|
||||
#[arg(long)]
|
||||
pub(crate) ffmpeg_threads: Option<usize>,
|
||||
|
||||
#[arg(
|
||||
help = "Set which subtitle language should be set as default / auto shown when starting a video"
|
||||
)]
|
||||
#[arg(long)]
|
||||
pub(crate) default_subtitle: Option<Locale>,
|
||||
#[arg(help = "Include fonts in the downloaded file")]
|
||||
#[arg(long)]
|
||||
pub(crate) include_fonts: bool,
|
||||
#[arg(
|
||||
help = "Includes chapters (e.g. intro, credits, ...). Only works if `--merge` is set to 'audio'"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "Includes chapters (e.g. intro, credits, ...). . Only works if `--merge` is set to 'audio'. \
|
||||
Because chapters are essentially only special timeframes in episodes like the intro, most of the video timeline isn't covered by a chapter.
|
||||
These \"gaps\" are filled with an 'Episode' chapter because many video players are ignore those gaps and just assume that a chapter ends when the next chapter start is reached, even if a specific end-time is set.
|
||||
Also chapters aren't always available, so in this case, just a big 'Episode' chapter from start to end will be created"
|
||||
)]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) include_chapters: bool,
|
||||
|
||||
#[arg(help = "Omit closed caption subtitles in the downloaded file")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) no_closed_caption: bool,
|
||||
|
||||
#[arg(help = "Skip files which are already existing by their name")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) skip_existing: bool,
|
||||
#[arg(
|
||||
help = "Only works in combination with `--skip-existing`. Sets the method how already existing files should be skipped. Valid methods are 'audio' and 'subtitle'"
|
||||
)]
|
||||
#[arg(long_help = "Only works in combination with `--skip-existing`. \
|
||||
By default, already existing files are determined by their name and the download of the corresponding episode is skipped. \
|
||||
With this flag you can modify this behavior. \
|
||||
Valid options are 'audio' and 'subtitle' (if the file already exists but the audio/subtitle are less from what should be downloaded, the episode gets downloaded and the file overwritten).")]
|
||||
#[arg(long, default_values_t = SkipExistingMethod::default())]
|
||||
#[arg(value_parser = SkipExistingMethod::parse)]
|
||||
pub(crate) skip_existing_method: Vec<SkipExistingMethod>,
|
||||
#[arg(help = "Skip special episodes")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) skip_specials: bool,
|
||||
|
||||
#[arg(help = "Skip any interactive input")]
|
||||
#[arg(short, long, default_value_t = false)]
|
||||
pub(crate) yes: bool,
|
||||
|
||||
#[arg(help = "The number of threads used to download")]
|
||||
#[arg(short, long, default_value_t = num_cpus::get())]
|
||||
pub(crate) threads: usize,
|
||||
|
||||
#[arg(help = "Crunchyroll series url(s)")]
|
||||
#[arg(required = true)]
|
||||
pub(crate) urls: Vec<String>,
|
||||
}
|
||||
|
||||
impl Execute for Archive {
|
||||
fn pre_check(&mut self) -> Result<()> {
|
||||
if !has_ffmpeg() {
|
||||
bail!("FFmpeg is needed to run this command")
|
||||
} else if PathBuf::from(&self.output)
|
||||
.extension()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
!= "mkv"
|
||||
&& !is_special_file(&self.output)
|
||||
&& self.output != "-"
|
||||
{
|
||||
bail!("File extension is not '.mkv'. Currently only matroska / '.mkv' files are supported")
|
||||
} else if let Some(special_output) = &self.output_specials {
|
||||
if PathBuf::from(special_output)
|
||||
.extension()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
!= "mkv"
|
||||
&& !is_special_file(special_output)
|
||||
&& special_output != "-"
|
||||
{
|
||||
bail!("File extension for special episodes is not '.mkv'. Currently only matroska / '.mkv' files are supported")
|
||||
}
|
||||
}
|
||||
|
||||
if self.include_chapters
|
||||
&& !matches!(self.merge, MergeBehavior::Sync)
|
||||
&& !matches!(self.merge, MergeBehavior::Audio)
|
||||
{
|
||||
bail!("`--include-chapters` can only be used if `--merge` is set to 'audio' or 'sync'")
|
||||
}
|
||||
|
||||
if !self.skip_existing_method.is_empty() && !self.skip_existing {
|
||||
warn!("`--skip-existing-method` has no effect if `--skip-existing` is not set")
|
||||
}
|
||||
|
||||
self.audio = all_locale_in_locales(self.audio.clone());
|
||||
self.subtitle = all_locale_in_locales(self.subtitle.clone());
|
||||
|
||||
if let Some(language_tagging) = &self.language_tagging {
|
||||
self.audio = resolve_locales(&self.audio);
|
||||
self.subtitle = resolve_locales(&self.subtitle);
|
||||
self.output_audio_locales = language_tagging.convert_locales(&self.audio);
|
||||
self.output_subtitle_locales = language_tagging.convert_locales(&self.subtitle);
|
||||
} else {
|
||||
self.output_audio_locales = self
|
||||
.audio
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|l| l.to_string())
|
||||
.collect();
|
||||
self.output_subtitle_locales = self
|
||||
.subtitle
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|l| l.to_string())
|
||||
.collect();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn execute(self, ctx: Context) -> Result<()> {
|
||||
if !ctx.crunchy.premium().await {
|
||||
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
|
||||
}
|
||||
|
||||
let mut parsed_urls = vec![];
|
||||
|
||||
for (i, url) in self.urls.clone().into_iter().enumerate() {
|
||||
let progress_handler = progress!("Parsing url {}", i + 1);
|
||||
match parse_url(&ctx.crunchy, url.clone(), true).await {
|
||||
Ok((media_collection, url_filter)) => {
|
||||
progress_handler.stop(format!("Parsed url {}", i + 1));
|
||||
parsed_urls.push((media_collection, url_filter))
|
||||
}
|
||||
Err(e) => bail!("url {} could not be parsed: {}", url, e),
|
||||
};
|
||||
}
|
||||
|
||||
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
||||
let progress_handler = progress!("Fetching series details");
|
||||
let single_format_collection = Filter::new(
|
||||
url_filter,
|
||||
self.audio.clone(),
|
||||
self.subtitle.clone(),
|
||||
|scope, locales| {
|
||||
let audios = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
|
||||
match scope {
|
||||
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} audio", series.title, audios),
|
||||
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} audio", season.season_number, audios),
|
||||
FilterMediaScope::Episode(episodes) => {
|
||||
if episodes.len() == 1 {
|
||||
warn!("Episode {} is not available with {} audio", episodes[0].sequence_number, audios)
|
||||
} else if episodes.len() == 2 {
|
||||
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, audios, episodes[0].sequence_number, episodes[1].sequence_number)
|
||||
} else {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
},
|
||||
|scope, locales| {
|
||||
let subtitles = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
|
||||
match scope {
|
||||
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} subtitles", series.title, subtitles),
|
||||
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} subtitles", season.season_number, subtitles),
|
||||
FilterMediaScope::Episode(episodes) => {
|
||||
if episodes.len() == 1 {
|
||||
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, subtitles)
|
||||
} else if episodes.len() == 2 {
|
||||
warn!("Season {} of season {} is only available with {} subtitles from episode {} to {}", episodes[0].season_number, episodes[0].season_title, subtitles, episodes[0].sequence_number, episodes[1].sequence_number)
|
||||
} else {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
},
|
||||
|season| {
|
||||
warn!("Skipping premium episodes in season {season}");
|
||||
Ok(())
|
||||
},
|
||||
Format::has_relative_fmt(&self.output),
|
||||
!self.yes,
|
||||
self.skip_specials,
|
||||
ctx.crunchy.premium().await,
|
||||
)
|
||||
.visit(media_collection)
|
||||
.await?;
|
||||
|
||||
if single_format_collection.is_empty() {
|
||||
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
|
||||
continue;
|
||||
}
|
||||
progress_handler.stop(format!("Loaded series information for url {}", i + 1));
|
||||
|
||||
single_format_collection.full_visual_output();
|
||||
|
||||
let download_builder =
|
||||
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
|
||||
.default_subtitle(self.default_subtitle.clone())
|
||||
.download_fonts(self.include_fonts)
|
||||
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
|
||||
.ffmpeg_threads(self.ffmpeg_threads)
|
||||
.output_format(Some("matroska".to_string()))
|
||||
.audio_sort(Some(self.audio.clone()))
|
||||
.subtitle_sort(Some(self.subtitle.clone()))
|
||||
.no_closed_caption(self.no_closed_caption)
|
||||
.merge_sync_tolerance(match self.merge {
|
||||
MergeBehavior::Sync => Some(self.merge_sync_tolerance),
|
||||
_ => None,
|
||||
})
|
||||
.merge_sync_precision(match self.merge {
|
||||
MergeBehavior::Sync => Some(self.merge_sync_precision),
|
||||
_ => None,
|
||||
})
|
||||
.threads(self.threads)
|
||||
.audio_locale_output_map(
|
||||
zip(self.audio.clone(), self.output_audio_locales.clone()).collect(),
|
||||
)
|
||||
.subtitle_locale_output_map(
|
||||
zip(self.subtitle.clone(), self.output_subtitle_locales.clone()).collect(),
|
||||
);
|
||||
|
||||
for single_formats in single_format_collection.into_iter() {
|
||||
let (download_formats, mut format) = get_format(&self, &single_formats).await?;
|
||||
|
||||
let mut downloader = download_builder.clone().build();
|
||||
for download_format in download_formats {
|
||||
downloader.add_format(download_format)
|
||||
}
|
||||
|
||||
let formatted_path = if format.is_special() {
|
||||
format.format_path(
|
||||
self.output_specials
|
||||
.as_ref()
|
||||
.map_or((&self.output).into(), |so| so.into()),
|
||||
self.universal_output,
|
||||
self.language_tagging.as_ref(),
|
||||
)
|
||||
} else {
|
||||
format.format_path(
|
||||
(&self.output).into(),
|
||||
self.universal_output,
|
||||
self.language_tagging.as_ref(),
|
||||
)
|
||||
};
|
||||
let (mut path, changed) = free_file(formatted_path.clone());
|
||||
|
||||
if changed && self.skip_existing {
|
||||
let mut skip = true;
|
||||
|
||||
if !self.skip_existing_method.is_empty() {
|
||||
if let Some((audio_locales, subtitle_locales)) =
|
||||
get_video_streams(&formatted_path)?
|
||||
{
|
||||
let method_audio = self
|
||||
.skip_existing_method
|
||||
.contains(&SkipExistingMethod::Audio);
|
||||
let method_subtitle = self
|
||||
.skip_existing_method
|
||||
.contains(&SkipExistingMethod::Subtitle);
|
||||
|
||||
let audio_differ = if method_audio {
|
||||
format
|
||||
.locales
|
||||
.iter()
|
||||
.any(|(a, _)| !audio_locales.contains(a))
|
||||
} else {
|
||||
false
|
||||
};
|
||||
let subtitle_differ = if method_subtitle {
|
||||
format
|
||||
.locales
|
||||
.clone()
|
||||
.into_iter()
|
||||
.flat_map(|(a, mut s)| {
|
||||
// remove the closed caption if the flag is given to omit
|
||||
// closed captions
|
||||
if self.no_closed_caption && a != Locale::ja_JP {
|
||||
s.retain(|l| l != &a)
|
||||
}
|
||||
s
|
||||
})
|
||||
.any(|l| !subtitle_locales.contains(&l))
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if (method_audio && audio_differ)
|
||||
|| (method_subtitle && subtitle_differ)
|
||||
{
|
||||
skip = false;
|
||||
path.clone_from(&formatted_path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if skip {
|
||||
debug!(
|
||||
"Skipping already existing file '{}'",
|
||||
formatted_path.to_string_lossy()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
format.locales.sort_by(|(a, _), (b, _)| {
|
||||
self.audio
|
||||
.iter()
|
||||
.position(|l| l == a)
|
||||
.cmp(&self.audio.iter().position(|l| l == b))
|
||||
});
|
||||
for (_, subtitles) in format.locales.iter_mut() {
|
||||
subtitles.sort_by(|a, b| {
|
||||
self.subtitle
|
||||
.iter()
|
||||
.position(|l| l == a)
|
||||
.cmp(&self.subtitle.iter().position(|l| l == b))
|
||||
})
|
||||
}
|
||||
|
||||
format.visual_output(&path);
|
||||
|
||||
downloader.download(&path).await?
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub(crate) enum SkipExistingMethod {
|
||||
Audio,
|
||||
Subtitle,
|
||||
}
|
||||
|
||||
impl Display for SkipExistingMethod {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
let value = match self {
|
||||
SkipExistingMethod::Audio => "audio",
|
||||
SkipExistingMethod::Subtitle => "subtitle",
|
||||
};
|
||||
write!(f, "{}", value)
|
||||
}
|
||||
}
|
||||
|
||||
impl SkipExistingMethod {
|
||||
fn parse(s: &str) -> Result<Self, String> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"audio" => Ok(Self::Audio),
|
||||
"subtitle" => Ok(Self::Subtitle),
|
||||
_ => Err(format!("invalid skip existing method '{}'", s)),
|
||||
}
|
||||
}
|
||||
|
||||
fn default<'a>() -> &'a [Self] {
|
||||
&[]
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_format(
|
||||
archive: &Archive,
|
||||
single_formats: &Vec<SingleFormat>,
|
||||
) -> Result<(Vec<DownloadFormat>, Format)> {
|
||||
let mut format_pairs = vec![];
|
||||
let mut single_format_to_format_pairs = vec![];
|
||||
|
||||
for single_format in single_formats {
|
||||
let stream = single_format.stream().await?;
|
||||
let Some((video, audio, _)) =
|
||||
stream_data_from_stream(&stream, &archive.resolution, None).await?
|
||||
else {
|
||||
if single_format.is_episode() {
|
||||
bail!(
|
||||
"Resolution ({}) is not available for episode {} ({}) of {} season {}",
|
||||
archive.resolution,
|
||||
single_format.episode_number,
|
||||
single_format.title,
|
||||
single_format.series_name,
|
||||
single_format.season_number,
|
||||
)
|
||||
} else {
|
||||
bail!(
|
||||
"Resolution ({}) is not available for {} ({})",
|
||||
archive.resolution,
|
||||
single_format.source_type(),
|
||||
single_format.title
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let subtitles: Vec<(Subtitle, bool)> = archive
|
||||
.subtitle
|
||||
.iter()
|
||||
.flat_map(|s| {
|
||||
let mut subtitles = vec![];
|
||||
if let Some(caption) = stream.captions.get(s) {
|
||||
subtitles.push((caption.clone(), true))
|
||||
}
|
||||
if let Some(subtitle) = stream.subtitles.get(s) {
|
||||
// the subtitle is probably cc if the audio is not japanese or only one subtitle
|
||||
// exists for this stream
|
||||
let cc = single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1;
|
||||
// only include the subtitles if no cc subtitle is already present or if it's
|
||||
// not cc
|
||||
if subtitles.is_empty() || !cc {
|
||||
subtitles.push((subtitle.clone(), cc))
|
||||
}
|
||||
}
|
||||
subtitles
|
||||
})
|
||||
.collect();
|
||||
|
||||
format_pairs.push((single_format, video.clone(), audio, subtitles.clone()));
|
||||
single_format_to_format_pairs.push((single_format.clone(), video, subtitles));
|
||||
|
||||
stream.invalidate().await?
|
||||
}
|
||||
|
||||
let mut download_formats = vec![];
|
||||
|
||||
match archive.merge {
|
||||
MergeBehavior::Video => {
|
||||
for (single_format, video, audio, subtitles) in format_pairs {
|
||||
download_formats.push(DownloadFormat {
|
||||
video: (video, single_format.audio.clone()),
|
||||
audios: vec![(audio, single_format.audio.clone())],
|
||||
subtitles,
|
||||
metadata: DownloadFormatMetadata { skip_events: None },
|
||||
})
|
||||
}
|
||||
}
|
||||
MergeBehavior::Audio => download_formats.push(DownloadFormat {
|
||||
video: (
|
||||
format_pairs.first().unwrap().1.clone(),
|
||||
format_pairs.first().unwrap().0.audio.clone(),
|
||||
),
|
||||
audios: format_pairs
|
||||
.iter()
|
||||
.map(|(single_format, _, audio, _)| (audio.clone(), single_format.audio.clone()))
|
||||
.collect(),
|
||||
// mix all subtitles together and then reduce them via a map so that only one subtitle
|
||||
// per language exists
|
||||
subtitles: format_pairs
|
||||
.iter()
|
||||
.flat_map(|(_, _, _, subtitles)| subtitles.clone())
|
||||
.collect(),
|
||||
metadata: DownloadFormatMetadata {
|
||||
skip_events: if archive.include_chapters {
|
||||
format_pairs.first().unwrap().0.skip_events().await?
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
}),
|
||||
MergeBehavior::Auto | MergeBehavior::Sync => {
|
||||
let mut d_formats: Vec<(Duration, DownloadFormat)> = vec![];
|
||||
|
||||
for (single_format, video, audio, subtitles) in format_pairs {
|
||||
let closest_format = d_formats.iter_mut().min_by(|(x, _), (y, _)| {
|
||||
x.sub(single_format.duration)
|
||||
.abs()
|
||||
.cmp(&y.sub(single_format.duration).abs())
|
||||
});
|
||||
|
||||
match closest_format {
|
||||
Some(closest_format)
|
||||
if closest_format
|
||||
.0
|
||||
.sub(single_format.duration)
|
||||
.abs()
|
||||
.num_milliseconds()
|
||||
< archive.merge_time_tolerance.into() =>
|
||||
{
|
||||
// If less than `audio_error` apart, use same audio.
|
||||
closest_format
|
||||
.1
|
||||
.audios
|
||||
.push((audio, single_format.audio.clone()));
|
||||
closest_format.1.subtitles.extend(subtitles);
|
||||
}
|
||||
_ => {
|
||||
d_formats.push((
|
||||
single_format.duration,
|
||||
DownloadFormat {
|
||||
video: (video, single_format.audio.clone()),
|
||||
audios: vec![(audio, single_format.audio.clone())],
|
||||
subtitles,
|
||||
metadata: DownloadFormatMetadata {
|
||||
skip_events: if archive.include_chapters {
|
||||
single_format.skip_events().await?
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
},
|
||||
));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
for (_, d_format) in d_formats.into_iter() {
|
||||
download_formats.push(d_format);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((
|
||||
download_formats,
|
||||
Format::from_single_formats(single_format_to_format_pairs),
|
||||
))
|
||||
}
|
||||
|
||||
fn get_video_streams(path: &Path) -> Result<Option<(Vec<Locale>, Vec<Locale>)>> {
|
||||
let video_streams =
|
||||
Regex::new(r"(?m)Stream\s#\d+:\d+\((?P<language>.+)\):\s(?P<type>(Audio|Subtitle))")
|
||||
.unwrap();
|
||||
|
||||
let ffmpeg = Command::new("ffmpeg")
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::piped())
|
||||
.arg("-hide_banner")
|
||||
.args(["-i", &path.to_string_lossy()])
|
||||
.output()?;
|
||||
let ffmpeg_output = String::from_utf8(ffmpeg.stderr)?;
|
||||
|
||||
let mut audio = vec![];
|
||||
let mut subtitle = vec![];
|
||||
for cap in video_streams.captures_iter(&ffmpeg_output) {
|
||||
let locale = cap.name("language").unwrap().as_str();
|
||||
let type_ = cap.name("type").unwrap().as_str();
|
||||
|
||||
match type_ {
|
||||
"Audio" => audio.push(Locale::from(locale.to_string())),
|
||||
"Subtitle" => subtitle.push(Locale::from(locale.to_string())),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
if audio.is_empty() && subtitle.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some((audio, subtitle)))
|
||||
}
|
||||
}
|
||||
3
crunchy-cli-core/src/archive/mod.rs
Normal file
3
crunchy-cli-core/src/archive/mod.rs
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
mod command;
|
||||
|
||||
pub use command::Archive;
|
||||
483
crunchy-cli-core/src/download/command.rs
Normal file
483
crunchy-cli-core/src/download/command.rs
Normal file
|
|
@ -0,0 +1,483 @@
|
|||
use crate::utils::context::Context;
|
||||
use crate::utils::download::{DownloadBuilder, DownloadFormat, DownloadFormatMetadata};
|
||||
use crate::utils::ffmpeg::{FFmpegPreset, SOFTSUB_CONTAINERS};
|
||||
use crate::utils::filter::{Filter, FilterMediaScope};
|
||||
use crate::utils::format::{Format, SingleFormat};
|
||||
use crate::utils::locale::{resolve_locales, LanguageTagging};
|
||||
use crate::utils::log::progress;
|
||||
use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
|
||||
use crate::utils::parse::parse_url;
|
||||
use crate::utils::video::stream_data_from_stream;
|
||||
use crate::Execute;
|
||||
use anyhow::bail;
|
||||
use anyhow::Result;
|
||||
use crunchyroll_rs::media::Resolution;
|
||||
use crunchyroll_rs::Locale;
|
||||
use log::{debug, error, warn};
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Clone, Debug, clap::Parser)]
|
||||
#[clap(about = "Download a video")]
|
||||
#[command(arg_required_else_help(true))]
|
||||
pub struct Download {
|
||||
#[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||
#[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
|
||||
Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||
#[arg(short, long, default_value_t = crate::utils::locale::system_locale())]
|
||||
pub(crate) audio: Locale,
|
||||
#[arg(skip)]
|
||||
output_audio_locale: String,
|
||||
#[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||
#[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \
|
||||
Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||
#[arg(short, long)]
|
||||
pub(crate) subtitle: Option<Locale>,
|
||||
#[arg(skip)]
|
||||
output_subtitle_locale: String,
|
||||
|
||||
#[arg(help = "Name of the output file")]
|
||||
#[arg(long_help = "Name of the output file. \
|
||||
If you use one of the following pattern they will get replaced:\n \
|
||||
{title} → Title of the video\n \
|
||||
{series_name} → Name of the series\n \
|
||||
{season_name} → Name of the season\n \
|
||||
{audio} → Audio language of the video\n \
|
||||
{width} → Width of the video\n \
|
||||
{height} → Height of the video\n \
|
||||
{season_number} → Number of the season\n \
|
||||
{episode_number} → Number of the episode\n \
|
||||
{relative_episode_number} → Number of the episode relative to its season\n \
|
||||
{sequence_number} → Like '{episode_number}' but without possible non-number characters\n \
|
||||
{relative_sequence_number} → Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
|
||||
{release_year} → Release year of the video\n \
|
||||
{release_month} → Release month of the video\n \
|
||||
{release_day} → Release day of the video\n \
|
||||
{series_id} → ID of the series\n \
|
||||
{season_id} → ID of the season\n \
|
||||
{episode_id} → ID of the episode")]
|
||||
#[arg(short, long, default_value = "{title}.mp4")]
|
||||
pub(crate) output: String,
|
||||
#[arg(help = "Name of the output file if the episode is a special")]
|
||||
#[arg(long_help = "Name of the output file if the episode is a special. \
|
||||
If not set, the '-o'/'--output' flag will be used as name template")]
|
||||
#[arg(long)]
|
||||
pub(crate) output_specials: Option<String>,
|
||||
|
||||
#[arg(help = "Sanitize the output file for use with all operating systems. \
|
||||
This option only affects template options and not static characters.")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) universal_output: bool,
|
||||
|
||||
#[arg(help = "Video resolution")]
|
||||
#[arg(long_help = "The video resolution. \
|
||||
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
|
||||
Specifying the exact pixels is not recommended, use one of the other options instead. \
|
||||
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
|
||||
The available common-use words are 'best' (choose the best resolution available) and 'worst' (worst resolution available)")]
|
||||
#[arg(short, long, default_value = "best")]
|
||||
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
|
||||
pub(crate) resolution: Resolution,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
|
||||
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run in issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' are both resolving to 'es')"
|
||||
)]
|
||||
#[arg(value_parser = LanguageTagging::parse)]
|
||||
pub(crate) language_tagging: Option<LanguageTagging>,
|
||||
|
||||
#[arg(help = format!("Presets for converting the video to a specific coding format. \
|
||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||
#[arg(long_help = format!("Presets for converting the video to a specific coding format. \
|
||||
If you need more specific ffmpeg customizations you can pass ffmpeg output arguments instead of a preset as value. \
|
||||
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
|
||||
#[arg(long)]
|
||||
#[arg(value_parser = FFmpegPreset::parse)]
|
||||
pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
|
||||
#[arg(
|
||||
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "The number of threads used by ffmpeg to generate the output file. \
|
||||
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
|
||||
By default, ffmpeg chooses the thread count which works best for the output codec"
|
||||
)]
|
||||
#[arg(long)]
|
||||
pub(crate) ffmpeg_threads: Option<usize>,
|
||||
|
||||
#[arg(help = "Skip files which are already existing by their name")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) skip_existing: bool,
|
||||
#[arg(help = "Skip special episodes")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) skip_specials: bool,
|
||||
|
||||
#[arg(help = "Includes chapters (e.g. intro, credits, ...)")]
|
||||
#[arg(long_help = "Includes chapters (e.g. intro, credits, ...). \
|
||||
Because chapters are essentially only special timeframes in episodes like the intro, most of the video timeline isn't covered by a chapter.
|
||||
These \"gaps\" are filled with an 'Episode' chapter because many video players are ignore those gaps and just assume that a chapter ends when the next chapter start is reached, even if a specific end-time is set.
|
||||
Also chapters aren't always available, so in this case, just a big 'Episode' chapter from start to end will be created")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) include_chapters: bool,
|
||||
|
||||
#[arg(help = "Skip any interactive input")]
|
||||
#[arg(short, long, default_value_t = false)]
|
||||
pub(crate) yes: bool,
|
||||
|
||||
#[arg(help = "Force subtitles to be always burnt-in")]
|
||||
#[arg(long, default_value_t = false)]
|
||||
pub(crate) force_hardsub: bool,
|
||||
|
||||
#[arg(help = "The number of threads used to download")]
|
||||
#[arg(short, long, default_value_t = num_cpus::get())]
|
||||
pub(crate) threads: usize,
|
||||
|
||||
#[arg(help = "Url(s) to Crunchyroll episodes or series")]
|
||||
#[arg(required = true)]
|
||||
pub(crate) urls: Vec<String>,
|
||||
}
|
||||
|
||||
impl Execute for Download {
|
||||
fn pre_check(&mut self) -> Result<()> {
|
||||
if !has_ffmpeg() {
|
||||
bail!("FFmpeg is needed to run this command")
|
||||
} else if Path::new(&self.output)
|
||||
.extension()
|
||||
.unwrap_or_default()
|
||||
.is_empty()
|
||||
&& !is_special_file(&self.output)
|
||||
&& self.output != "-"
|
||||
{
|
||||
bail!("No file extension found. Please specify a file extension (via `-o`) for the output file")
|
||||
}
|
||||
|
||||
if self.subtitle.is_some() {
|
||||
if let Some(ext) = Path::new(&self.output).extension() {
|
||||
if self.force_hardsub {
|
||||
warn!("Hardsubs are forced. Adding subtitles may take a while")
|
||||
} else if !["mkv", "mov", "mp4"].contains(&ext.to_string_lossy().as_ref()) {
|
||||
warn!("Detected a container which does not support softsubs. Adding subtitles may take a while")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(special_output) = &self.output_specials {
|
||||
if Path::new(special_output)
|
||||
.extension()
|
||||
.unwrap_or_default()
|
||||
.is_empty()
|
||||
&& !is_special_file(special_output)
|
||||
&& special_output != "-"
|
||||
{
|
||||
bail!("No file extension found. Please specify a file extension (via `--output-specials`) for the output file")
|
||||
}
|
||||
if let Some(ext) = Path::new(special_output).extension() {
|
||||
if self.force_hardsub {
|
||||
warn!("Hardsubs are forced for special episodes. Adding subtitles may take a while")
|
||||
} else if !["mkv", "mov", "mp4"].contains(&ext.to_string_lossy().as_ref()) {
|
||||
warn!("Detected a container which does not support softsubs. Adding subtitles for special episodes may take a while")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(language_tagging) = &self.language_tagging {
|
||||
self.audio = resolve_locales(&[self.audio.clone()]).remove(0);
|
||||
self.subtitle = self
|
||||
.subtitle
|
||||
.as_ref()
|
||||
.map(|s| resolve_locales(&[s.clone()]).remove(0));
|
||||
self.output_audio_locale = language_tagging.for_locale(&self.audio);
|
||||
self.output_subtitle_locale = self
|
||||
.subtitle
|
||||
.as_ref()
|
||||
.map(|s| language_tagging.for_locale(s))
|
||||
.unwrap_or_default()
|
||||
} else {
|
||||
self.output_audio_locale = self.audio.to_string();
|
||||
self.output_subtitle_locale = self
|
||||
.subtitle
|
||||
.as_ref()
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_default();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn execute(self, ctx: Context) -> Result<()> {
|
||||
if !ctx.crunchy.premium().await {
|
||||
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
|
||||
}
|
||||
|
||||
let mut parsed_urls = vec![];
|
||||
|
||||
let output_supports_softsubs = SOFTSUB_CONTAINERS.contains(
|
||||
&Path::new(&self.output)
|
||||
.extension()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.as_ref(),
|
||||
);
|
||||
let special_output_supports_softsubs = if let Some(so) = &self.output_specials {
|
||||
SOFTSUB_CONTAINERS.contains(
|
||||
&Path::new(so)
|
||||
.extension()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.as_ref(),
|
||||
)
|
||||
} else {
|
||||
output_supports_softsubs
|
||||
};
|
||||
|
||||
for (i, url) in self.urls.clone().into_iter().enumerate() {
|
||||
let progress_handler = progress!("Parsing url {}", i + 1);
|
||||
match parse_url(&ctx.crunchy, url.clone(), true).await {
|
||||
Ok((media_collection, url_filter)) => {
|
||||
progress_handler.stop(format!("Parsed url {}", i + 1));
|
||||
parsed_urls.push((media_collection, url_filter))
|
||||
}
|
||||
Err(e) => bail!("url {} could not be parsed: {}", url, e),
|
||||
};
|
||||
}
|
||||
|
||||
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
|
||||
let progress_handler = progress!("Fetching series details");
|
||||
let single_format_collection = Filter::new(
|
||||
url_filter,
|
||||
vec![self.audio.clone()],
|
||||
self.subtitle.as_ref().map_or(vec![], |s| vec![s.clone()]),
|
||||
|scope, locales| {
|
||||
match scope {
|
||||
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} audio", series.title, locales[0]),
|
||||
FilterMediaScope::Season(season) => {
|
||||
error!("Season {} is not available with {} audio", season.season_number, locales[0]);
|
||||
Ok(false)
|
||||
}
|
||||
FilterMediaScope::Episode(episodes) => {
|
||||
if episodes.len() == 1 {
|
||||
warn!("Episode {} of season {} is not available with {} audio", episodes[0].sequence_number, episodes[0].season_title, locales[0])
|
||||
} else if episodes.len() == 2 {
|
||||
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
|
||||
} else {
|
||||
unimplemented!()
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
},
|
||||
|scope, locales| {
|
||||
match scope {
|
||||
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} subtitles", series.title, locales[0]),
|
||||
FilterMediaScope::Season(season) => {
|
||||
warn!("Season {} is not available with {} subtitles", season.season_number, locales[0]);
|
||||
Ok(false)
|
||||
},
|
||||
FilterMediaScope::Episode(episodes) => {
|
||||
if episodes.len() == 1 {
|
||||
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, locales[0])
|
||||
} else if episodes.len() == 2 {
|
||||
warn!("Season {} is only available with {} subtitles from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
|
||||
} else {
|
||||
unimplemented!()
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
},
|
||||
|season| {
|
||||
warn!("Skipping premium episodes in season {season}");
|
||||
Ok(())
|
||||
},
|
||||
Format::has_relative_fmt(&self.output),
|
||||
!self.yes,
|
||||
self.skip_specials,
|
||||
ctx.crunchy.premium().await,
|
||||
)
|
||||
.visit(media_collection)
|
||||
.await?;
|
||||
|
||||
if single_format_collection.is_empty() {
|
||||
progress_handler.stop(format!("Skipping url {} (no matching videos found)", i + 1));
|
||||
continue;
|
||||
}
|
||||
progress_handler.stop(format!("Loaded series information for url {}", i + 1));
|
||||
|
||||
single_format_collection.full_visual_output();
|
||||
|
||||
let download_builder =
|
||||
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
|
||||
.default_subtitle(self.subtitle.clone())
|
||||
.force_hardsub(self.force_hardsub)
|
||||
.output_format(if is_special_file(&self.output) || self.output == "-" {
|
||||
Some("mpegts".to_string())
|
||||
} else {
|
||||
None
|
||||
})
|
||||
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
|
||||
.ffmpeg_threads(self.ffmpeg_threads)
|
||||
.threads(self.threads)
|
||||
.audio_locale_output_map(HashMap::from([(
|
||||
self.audio.clone(),
|
||||
self.output_audio_locale.clone(),
|
||||
)]))
|
||||
.subtitle_locale_output_map(
|
||||
self.subtitle.as_ref().map_or(HashMap::new(), |s| {
|
||||
HashMap::from([(s.clone(), self.output_subtitle_locale.clone())])
|
||||
}),
|
||||
);
|
||||
|
||||
for mut single_formats in single_format_collection.into_iter() {
|
||||
// the vec contains always only one item
|
||||
let single_format = single_formats.remove(0);
|
||||
|
||||
let (download_format, format) = get_format(
|
||||
&self,
|
||||
&single_format,
|
||||
if self.force_hardsub {
|
||||
true
|
||||
} else if single_format.is_special() {
|
||||
!special_output_supports_softsubs
|
||||
} else {
|
||||
!output_supports_softsubs
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut downloader = download_builder.clone().build();
|
||||
downloader.add_format(download_format);
|
||||
|
||||
let formatted_path = if format.is_special() {
|
||||
format.format_path(
|
||||
self.output_specials
|
||||
.as_ref()
|
||||
.map_or((&self.output).into(), |so| so.into()),
|
||||
self.universal_output,
|
||||
self.language_tagging.as_ref(),
|
||||
)
|
||||
} else {
|
||||
format.format_path(
|
||||
(&self.output).into(),
|
||||
self.universal_output,
|
||||
self.language_tagging.as_ref(),
|
||||
)
|
||||
};
|
||||
let (path, changed) = free_file(formatted_path.clone());
|
||||
|
||||
if changed && self.skip_existing {
|
||||
debug!(
|
||||
"Skipping already existing file '{}'",
|
||||
formatted_path.to_string_lossy()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
format.visual_output(&path);
|
||||
|
||||
downloader.download(&path).await?
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolves the concrete stream data for `single_format` and builds both the
/// internal download description (`DownloadFormat`) and the user-facing
/// `Format` metadata from it.
///
/// `try_peer_hardsubs` decides whether a hardsub variant matching
/// `download.subtitle` is requested from `stream_data_from_stream`; when
/// `false`, only the plain (softsub-capable) stream is considered.
///
/// Errors when the requested resolution is unavailable or any underlying API
/// call fails.
async fn get_format(
    download: &Download,
    single_format: &SingleFormat,
    try_peer_hardsubs: bool,
) -> Result<(DownloadFormat, Format)> {
    let stream = single_format.stream().await?;
    // `None` from `stream_data_from_stream` means the requested resolution
    // does not exist for this stream — report it with context.
    let Some((video, audio, contains_hardsub)) = stream_data_from_stream(
        &stream,
        &download.resolution,
        if try_peer_hardsubs {
            download.subtitle.clone()
        } else {
            None
        },
    )
    .await?
    else {
        if single_format.is_episode() {
            bail!(
                "Resolution ({}) is not available for episode {} ({}) of {} season {}",
                download.resolution,
                single_format.episode_number,
                single_format.title,
                single_format.series_name,
                single_format.season_number,
            )
        } else {
            bail!(
                "Resolution ({}) is not available for {} ({})",
                download.resolution,
                single_format.source_type(),
                single_format.title
            )
        }
    };

    // Pick the subtitle track to download. A hardsubbed video gets no
    // separate track (the locale is still recorded in `format.locales`
    // below). For Japanese audio, real subtitles are preferred with closed
    // captions as fallback; for dubs the priority is reversed.
    let subtitle = if contains_hardsub {
        None
    } else if let Some(subtitle_locale) = &download.subtitle {
        if download.audio == Locale::ja_JP {
            stream
                .subtitles
                .get(subtitle_locale)
                // use closed captions as fallback if no actual subtitles are found
                .or_else(|| stream.captions.get(subtitle_locale))
                .cloned()
        } else {
            stream
                .captions
                .get(subtitle_locale)
                .or_else(|| stream.subtitles.get(subtitle_locale))
                .cloned()
        }
    } else {
        None
    };

    let download_format = DownloadFormat {
        video: (video.clone(), single_format.audio.clone()),
        audios: vec![(audio, single_format.audio.clone())],
        subtitles: subtitle.clone().map_or(vec![], |s| {
            vec![(
                s,
                // NOTE(review): this bool presumably marks the track as the
                // "default" subtitle — confirm against DownloadFormat's consumer
                single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
            )]
        }),
        metadata: DownloadFormatMetadata {
            // chapter/skip markers are only fetched when requested
            skip_events: if download.include_chapters {
                single_format.skip_events().await?
            } else {
                None
            },
        },
    };
    let mut format = Format::from_single_formats(vec![(
        single_format.clone(),
        video,
        subtitle.map_or(vec![], |s| {
            vec![(
                s,
                single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
            )]
        }),
    )]);
    if contains_hardsub {
        // record the burned-in subtitle locale in the format metadata
        let (_, subs) = format.locales.get_mut(0).unwrap();
        subs.push(download.subtitle.clone().unwrap())
    }

    // NOTE(review): presumably releases the active stream slot on
    // Crunchyroll's side — confirm against `Stream::invalidate` docs.
    stream.invalidate().await?;

    Ok((download_format, format))
}
|
||||
3
crunchy-cli-core/src/download/mod.rs
Normal file
3
crunchy-cli-core/src/download/mod.rs
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
mod command;
|
||||
|
||||
pub use command::Download;
|
||||
405
crunchy-cli-core/src/lib.rs
Normal file
405
crunchy-cli-core/src/lib.rs
Normal file
|
|
@ -0,0 +1,405 @@
|
|||
use crate::utils::context::Context;
|
||||
use crate::utils::locale::system_locale;
|
||||
use crate::utils::log::{progress, CliLogger};
|
||||
use anyhow::bail;
|
||||
use anyhow::Result;
|
||||
use clap::{Parser, Subcommand};
|
||||
use crunchyroll_rs::crunchyroll::CrunchyrollBuilder;
|
||||
use crunchyroll_rs::error::Error;
|
||||
use crunchyroll_rs::{Crunchyroll, Locale};
|
||||
use log::{debug, error, warn, LevelFilter};
|
||||
use reqwest::{Client, Proxy};
|
||||
use std::{env, fs};
|
||||
|
||||
mod archive;
|
||||
mod download;
|
||||
mod login;
|
||||
mod search;
|
||||
mod utils;
|
||||
|
||||
use crate::utils::rate_limit::RateLimiterService;
|
||||
pub use archive::Archive;
|
||||
use dialoguer::console::Term;
|
||||
pub use download::Download;
|
||||
pub use login::Login;
|
||||
pub use search::Search;
|
||||
|
||||
/// Common interface every CLI subcommand implements.
trait Execute {
    /// Cheap argument validation run before any session is created.
    /// Default implementation accepts everything.
    fn pre_check(&mut self) -> Result<()> {
        Ok(())
    }
    /// Runs the subcommand with an authenticated context.
    async fn execute(self, ctx: Context) -> Result<()>;
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[clap(author, version = version(), about)]
|
||||
#[clap(name = "crunchy-cli")]
|
||||
pub struct Cli {
|
||||
#[clap(flatten)]
|
||||
verbosity: Verbosity,
|
||||
|
||||
#[arg(
|
||||
help = "Overwrite the language in which results are returned. Default is your system language"
|
||||
)]
|
||||
#[arg(global = true, long)]
|
||||
lang: Option<Locale>,
|
||||
|
||||
#[arg(
|
||||
help = "Enable experimental fixes which may resolve some unexpected errors. Generally not recommended as this flag may crash the program completely"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "Enable experimental fixes which may resolve some unexpected errors. \
|
||||
It is not recommended to use this this flag regularly, it might cause unexpected errors which may crash the program completely. \
|
||||
If everything works as intended this option isn't needed, but sometimes Crunchyroll mislabels \
|
||||
the audio of a series/season or episode or returns a wrong season number. This is when using this option might help to solve the issue"
|
||||
)]
|
||||
#[arg(global = true, long, default_value_t = false)]
|
||||
experimental_fixes: bool,
|
||||
|
||||
#[clap(flatten)]
|
||||
login_method: login::LoginMethod,
|
||||
|
||||
#[arg(help = "Use a proxy to route all traffic through")]
|
||||
#[arg(long_help = "Use a proxy to route all traffic through. \
|
||||
Make sure that the proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself. \
|
||||
Besides specifying a simple url, you also can partially control where a proxy should be used: '<url>:' only proxies api requests, ':<url>' only proxies download traffic, '<url>:<url>' proxies api requests through the first url and download traffic through the second url")]
|
||||
#[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_proxies)]
|
||||
proxy: Option<(Option<Proxy>, Option<Proxy>)>,
|
||||
|
||||
#[arg(help = "Use custom user agent")]
|
||||
#[arg(global = true, long)]
|
||||
user_agent: Option<String>,
|
||||
|
||||
#[arg(
|
||||
help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB]"
|
||||
)]
|
||||
#[arg(
|
||||
long_help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB] (e.g. 500KB or 10MB)"
|
||||
)]
|
||||
#[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_speed_limit)]
|
||||
speed_limit: Option<u32>,
|
||||
|
||||
#[clap(subcommand)]
|
||||
command: Command,
|
||||
}
|
||||
|
||||
fn version() -> String {
|
||||
let package_version = env!("CARGO_PKG_VERSION");
|
||||
let git_commit_hash = env!("GIT_HASH");
|
||||
let build_date = env!("BUILD_DATE");
|
||||
|
||||
if git_commit_hash.is_empty() {
|
||||
package_version.to_string()
|
||||
} else {
|
||||
format!("{} ({} {})", package_version, git_commit_hash, build_date)
|
||||
}
|
||||
}
|
||||
|
||||
/// The available CLI subcommands; each variant wraps its clap argument struct.
#[derive(Debug, Subcommand)]
enum Command {
    Archive(Archive),
    Download(Download),
    Login(Login),
    Search(Search),
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct Verbosity {
|
||||
#[arg(help = "Verbose output")]
|
||||
#[arg(global = true, short, long)]
|
||||
verbose: bool,
|
||||
|
||||
#[arg(help = "Quiet output. Does not print anything unless it's a error")]
|
||||
#[arg(
|
||||
long_help = "Quiet output. Does not print anything unless it's a error. Can be helpful if you pipe the output to stdout"
|
||||
)]
|
||||
#[arg(global = true, short, long)]
|
||||
quiet: bool,
|
||||
}
|
||||
|
||||
/// Library entry point: parses arguments, configures logging, runs the
/// selected subcommand and exits the process with code 1 on any failure.
pub async fn main(args: &[String]) {
    let mut cli: Cli = Cli::parse_from(args);

    // Configure the logger from the verbosity flags; `-v` and `-q` together
    // is rejected here (clap does not enforce the conflict).
    if cli.verbosity.verbose || cli.verbosity.quiet {
        if cli.verbosity.verbose && cli.verbosity.quiet {
            eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time");
            std::process::exit(1)
        } else if cli.verbosity.verbose {
            CliLogger::init(LevelFilter::Debug).unwrap()
        } else if cli.verbosity.quiet {
            CliLogger::init(LevelFilter::Error).unwrap()
        }
    } else {
        CliLogger::init(LevelFilter::Info).unwrap()
    }

    debug!("cli input: {:?}", cli);

    // Per-command pre-checks, run before any network session exists.
    match &mut cli.command {
        Command::Archive(archive) => {
            // prevent interactive select to be shown when output should be quiet
            if cli.verbosity.quiet {
                archive.yes = true;
            }
            pre_check_executor(archive).await
        }
        Command::Download(download) => {
            // prevent interactive select to be shown when output should be quiet
            if cli.verbosity.quiet {
                download.yes = true;
            }
            pre_check_executor(download).await
        }
        Command::Login(login) => {
            // `--remove` needs no session at all: delete the stored session
            // file (ignoring errors) and return immediately.
            if login.remove {
                if let Some(session_file) = login::session_file_path() {
                    let _ = fs::remove_file(session_file);
                }
                return;
            } else {
                pre_check_executor(login).await
            }
        }
        Command::Search(search) => pre_check_executor(search).await,
    };

    let ctx = match create_ctx(&mut cli).await {
        Ok(ctx) => ctx,
        Err(e) => {
            error!("{}", e);
            std::process::exit(1)
        }
    };
    debug!("Created context");

    // On Ctrl-C, best-effort cleanup of this tool's temp files/dirs
    // (recognized by the ".crunchy-cli_" name prefix), then exit 1.
    ctrlc::set_handler(move || {
        debug!("Ctrl-c detected");
        if let Ok(dir) = fs::read_dir(env::temp_dir()) {
            for file in dir.flatten() {
                if file
                    .path()
                    .file_name()
                    .unwrap_or_default()
                    .to_str()
                    .unwrap_or_default()
                    .starts_with(".crunchy-cli_")
                {
                    if file.file_type().map_or(true, |ft| ft.is_file()) {
                        let result = fs::remove_file(file.path());
                        debug!(
                            "Ctrl-c removed temporary file {} {}",
                            file.path().to_string_lossy(),
                            if result.is_ok() {
                                "successfully"
                            } else {
                                "not successfully"
                            }
                        )
                    } else {
                        let result = fs::remove_dir_all(file.path());
                        debug!(
                            "Ctrl-c removed temporary directory {} {}",
                            file.path().to_string_lossy(),
                            if result.is_ok() {
                                "successfully"
                            } else {
                                "not successfully"
                            }
                        )
                    }
                }
            }
        }
        // when pressing ctrl-c while interactively choosing seasons the cursor stays hidden, this
        // line shows it again
        let _ = Term::stdout().show_cursor();
        std::process::exit(1)
    })
    .unwrap();
    debug!("Created ctrl-c handler");

    // Dispatch to the subcommand; `execute_executor` exits on error.
    match cli.command {
        Command::Archive(archive) => execute_executor(archive, ctx).await,
        Command::Download(download) => execute_executor(download, ctx).await,
        Command::Login(login) => execute_executor(login, ctx).await,
        Command::Search(search) => execute_executor(search, ctx).await,
    };
}
|
||||
|
||||
async fn pre_check_executor(executor: &mut impl Execute) {
|
||||
if let Err(err) = executor.pre_check() {
|
||||
error!("Misconfigurations detected: {}", err);
|
||||
std::process::exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
/// Runs the subcommand and exits the process with code 1 on failure.
///
/// Crunchyroll API errors are special-cased: a `Error::Block` (Cloudflare
/// bot protection) gets its message rewritten in place to a more helpful
/// user-facing hint before being logged.
async fn execute_executor(executor: impl Execute, ctx: Context) {
    if let Err(mut err) = executor.execute(ctx).await {
        if let Some(crunchy_error) = err.downcast_mut::<Error>() {
            if let Error::Block { message, .. } = crunchy_error {
                *message = "Triggered Cloudflare bot protection. Try again later or use a VPN or proxy to spoof your location".to_string()
            }

            error!("An error occurred: {}", crunchy_error)
        } else {
            error!("An error occurred: {}", err)
        }

        std::process::exit(1)
    }
}
|
||||
|
||||
/// Builds the shared `Context`: two HTTP clients (one for API traffic, one
/// for download traffic — each with its own optional proxy from the
/// `<url>:<url>` proxy flag), an authenticated Crunchyroll session and an
/// optional rate limiter derived from `--speed-limit`.
async fn create_ctx(cli: &mut Cli) -> Result<Context> {
    // proxy tuple: .0 = api traffic, .1 = download traffic
    let crunchy_client = reqwest_client(
        cli.proxy.as_ref().and_then(|p| p.0.clone()),
        cli.user_agent.clone(),
    );
    let internal_client = reqwest_client(
        cli.proxy.as_ref().and_then(|p| p.1.clone()),
        cli.user_agent.clone(),
    );

    let crunchy = crunchyroll_session(
        cli,
        crunchy_client.clone(),
        cli.speed_limit
            .map(|l| RateLimiterService::new(l, crunchy_client)),
    )
    .await?;

    Ok(Context {
        crunchy,
        client: internal_client.clone(),
        rate_limiter: cli
            .speed_limit
            .map(|l| RateLimiterService::new(l, internal_client)),
    })
}
|
||||
|
||||
/// Creates an authenticated Crunchyroll session.
///
/// Resolves the UI locale (explicit `--lang`, else system locale with en-US
/// fallback), configures the session builder, then logs in via exactly one
/// of: a session file stored on disk (when no login flag is given),
/// `--credentials email:password`, or `--anonymous`.
async fn crunchyroll_session(
    cli: &mut Cli,
    client: Client,
    rate_limiter: Option<RateLimiterService>,
) -> Result<Crunchyroll> {
    // languages the Crunchyroll UI/API officially supports
    let supported_langs = vec![
        Locale::ar_ME,
        Locale::de_DE,
        Locale::en_US,
        Locale::es_ES,
        Locale::es_419,
        Locale::fr_FR,
        Locale::it_IT,
        Locale::pt_BR,
        Locale::pt_PT,
        Locale::ru_RU,
    ];
    let locale = if let Some(lang) = &cli.lang {
        if !supported_langs.contains(lang) {
            bail!(
                "Via `--lang` specified language is not supported. Supported languages: {}",
                supported_langs
                    .iter()
                    .map(|l| format!("`{}` ({})", l, l.to_human_readable()))
                    .collect::<Vec<String>>()
                    .join(", ")
            )
        }
        lang.clone()
    } else {
        let mut lang = system_locale();
        if !supported_langs.contains(&lang) {
            warn!("Recognized system locale is not supported. Using en-US as default. Use `--lang` to overwrite the used language");
            lang = Locale::en_US
        }
        lang
    };

    let mut builder = Crunchyroll::builder()
        .locale(locale)
        .client(client.clone())
        // both stabilization workarounds are gated behind --experimental-fixes
        .stabilization_locales(cli.experimental_fixes)
        .stabilization_season_number(cli.experimental_fixes);
    if let Command::Download(download) = &cli.command {
        builder = builder.preferred_audio_locale(download.audio.clone())
    }
    if let Some(rate_limiter) = rate_limiter {
        builder = builder.middleware(rate_limiter)
    }

    // number of explicit login flags given on the command line (0, 1 or 2)
    let root_login_methods_count =
        cli.login_method.credentials.is_some() as u8 + cli.login_method.anonymous as u8;

    let progress_handler = progress!("Logging in");
    if root_login_methods_count == 0 {
        // no login flag → try the session file written by `crunchy-cli login`;
        // its format is "<token_type>:<token>"
        if let Some(login_file_path) = login::session_file_path() {
            if login_file_path.exists() {
                let session = fs::read_to_string(login_file_path)?;
                if let Some((token_type, token)) = session.split_once(':') {
                    match token_type {
                        "refresh_token" => {
                            // NOTE(review): this early return skips
                            // `progress_handler.stop(...)` on success —
                            // confirm the progress line is cleaned up on drop
                            return match builder.login_with_refresh_token(token).await {
                                Ok(crunchy) => Ok(crunchy),
                                Err(e) => {
                                    // "invalid_grant" means the refresh token expired
                                    if let Error::Request { message, .. } = &e {
                                        if message.starts_with("invalid_grant") {
                                            bail!("The stored login is expired, please login again")
                                        }
                                    }
                                    Err(e.into())
                                }
                            }
                        }
                        "etp_rt" => bail!("The stored login method (etp-rt) isn't supported anymore. Please login again using your credentials"),
                        _ => (),
                    }
                }
                // NOTE(review): this error echoes the raw session file content
                // (a credential) to the terminal — consider redacting
                bail!("Could not read stored session ('{}')", session)
            }
        }
        bail!("Please use a login method ('--credentials' or '--anonymous')")
    } else if root_login_methods_count > 1 {
        bail!("Please use only one login method ('--credentials' or '--anonymous')")
    }

    let crunchy = if let Some(credentials) = &cli.login_method.credentials {
        if let Some((email, password)) = credentials.split_once(':') {
            builder.login_with_credentials(email, password).await?
        } else {
            bail!("Invalid credentials format. Please provide your credentials as email:password")
        }
    } else if cli.login_method.anonymous {
        builder.login_anonymously().await?
    } else {
        // unreachable: root_login_methods_count == 1 guarantees one branch above
        bail!("should never happen")
    };

    progress_handler.stop("Logged in");

    Ok(crunchy)
}
|
||||
|
||||
/// Builds a reqwest `Client` from the crunchyroll-rs predefined builder,
/// optionally adding a proxy and a custom user agent.
///
/// With the `openssl-tls`/`openssl-tls-static` features, native TLS is used
/// with the built-in roots disabled and the system's root certificates loaded
/// explicitly instead.
fn reqwest_client(proxy: Option<Proxy>, user_agent: Option<String>) -> Client {
    let mut builder = CrunchyrollBuilder::predefined_client_builder();
    if let Some(p) = proxy {
        builder = builder.proxy(p)
    }
    if let Some(ua) = user_agent {
        builder = builder.user_agent(ua)
    }

    #[cfg(any(feature = "openssl-tls", feature = "openssl-tls-static"))]
    let client = {
        let mut builder = builder.use_native_tls().tls_built_in_root_certs(false);

        // NOTE(review): system roots are discovered via `rustls_native_certs`
        // even though native TLS is active — looks intentional (shared cert
        // discovery), but worth confirming
        for certificate in rustls_native_certs::load_native_certs().unwrap() {
            builder =
                builder.add_root_certificate(reqwest::Certificate::from_der(&certificate).unwrap())
        }

        builder.build().unwrap()
    };
    #[cfg(not(any(feature = "openssl-tls", feature = "openssl-tls-static")))]
    let client = builder.build().unwrap();

    client
}
|
||||
55
crunchy-cli-core/src/login/command.rs
Normal file
55
crunchy-cli-core/src/login/command.rs
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
use crate::utils::context::Context;
|
||||
use crate::Execute;
|
||||
use anyhow::bail;
|
||||
use anyhow::Result;
|
||||
use clap::Parser;
|
||||
use crunchyroll_rs::crunchyroll::SessionToken;
|
||||
use log::info;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// `login` subcommand: persists the current session's refresh token to disk.
#[derive(Debug, clap::Parser)]
#[clap(about = "Save your login credentials persistent on disk")]
pub struct Login {
    // handled in `main` before `execute` is ever reached
    #[arg(help = "Remove your stored credentials (instead of saving them)")]
    #[arg(long)]
    pub remove: bool,
}

impl Execute for Login {
    /// Writes the session's refresh token to the session file as
    /// "refresh_token:<token>". Fails for etp_rt and anonymous sessions,
    /// which cannot be persisted, or when no config directory exists.
    async fn execute(self, ctx: Context) -> Result<()> {
        if let Some(login_file_path) = session_file_path() {
            fs::create_dir_all(login_file_path.parent().unwrap())?;

            match ctx.crunchy.session_token().await {
                SessionToken::RefreshToken(refresh_token) => {
                    fs::write(login_file_path, format!("refresh_token:{}", refresh_token))?
                }
                SessionToken::EtpRt(_) => bail!("Login with etp_rt isn't supported anymore. Please use your credentials to login"),
                SessionToken::Anonymous => bail!("Anonymous login cannot be saved"),
            }

            info!("Saved login");

            Ok(())
        } else {
            bail!("Cannot find config path")
        }
    }
}
|
||||
|
||||
/// Global login flags shared by all subcommands (flattened into `Cli`).
/// Exactly one of the two must be used; the check lives in
/// `crunchyroll_session`, not in clap.
#[derive(Clone, Debug, Parser)]
pub struct LoginMethod {
    #[arg(
        help = "Login with credentials (email and password). Must be provided as email:password"
    )]
    #[arg(global = true, long)]
    pub credentials: Option<String>,
    #[arg(help = "Login anonymously / without an account")]
    #[arg(global = true, long, default_value_t = false)]
    pub anonymous: bool,
}
|
||||
|
||||
pub fn session_file_path() -> Option<PathBuf> {
|
||||
dirs::config_dir().map(|config_dir| config_dir.join("crunchy-cli").join("session"))
|
||||
}
|
||||
3
crunchy-cli-core/src/login/mod.rs
Normal file
3
crunchy-cli-core/src/login/mod.rs
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
mod command;
|
||||
|
||||
pub use command::{session_file_path, Login, LoginMethod};
|
||||
222
crunchy-cli-core/src/search/command.rs
Normal file
222
crunchy-cli-core/src/search/command.rs
Normal file
|
|
@ -0,0 +1,222 @@
|
|||
use crate::search::filter::FilterOptions;
|
||||
use crate::search::format::Format;
|
||||
use crate::utils::context::Context;
|
||||
use crate::utils::parse::{parse_url, UrlFilter};
|
||||
use crate::Execute;
|
||||
use anyhow::{bail, Result};
|
||||
use crunchyroll_rs::common::StreamExt;
|
||||
use crunchyroll_rs::search::QueryResults;
|
||||
use crunchyroll_rs::{Episode, Locale, MediaCollection, MovieListing, MusicVideo, Series};
|
||||
use log::warn;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
#[clap(about = "Search in videos")]
|
||||
#[command(arg_required_else_help(true))]
|
||||
pub struct Search {
|
||||
#[arg(help = format!("Audio languages to include. \
|
||||
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
|
||||
#[arg(long_help = format!("Audio languages to include. \
|
||||
Available languages are:\n {}", Locale::all().into_iter().map(|l| format!("{:<6} → {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
|
||||
#[arg(long, default_values_t = vec![crate::utils::locale::system_locale()])]
|
||||
audio: Vec<Locale>,
|
||||
|
||||
#[arg(help = "Limit of search top search results")]
|
||||
#[arg(long, default_value_t = 5)]
|
||||
search_top_results_limit: u32,
|
||||
#[arg(help = "Limit of search series results")]
|
||||
#[arg(long, default_value_t = 0)]
|
||||
search_series_limit: u32,
|
||||
#[arg(help = "Limit of search movie listing results")]
|
||||
#[arg(long, default_value_t = 0)]
|
||||
search_movie_listing_limit: u32,
|
||||
#[arg(help = "Limit of search episode results")]
|
||||
#[arg(long, default_value_t = 0)]
|
||||
search_episode_limit: u32,
|
||||
#[arg(help = "Limit of search music results")]
|
||||
#[arg(long, default_value_t = 0)]
|
||||
search_music_limit: u32,
|
||||
|
||||
/// Format of the output text.
|
||||
///
|
||||
/// You can specify keywords in a specific pattern and they will get replaced in the output text.
|
||||
/// The required pattern for this begins with `{{`, then the keyword, and closes with `}}` (e.g. `{{episode.title}}`).
|
||||
/// For example, if you want to get the title of an episode, you can use `Title {{episode.title}}` and `{{episode.title}}` will be replaced with the episode title
|
||||
///
|
||||
/// See the following list for all keywords and their meaning:
|
||||
/// series.id → Series id
|
||||
/// series.title → Series title
|
||||
/// series.description → Series description
|
||||
/// series.release_year → Series release year
|
||||
///
|
||||
/// season.id → Season id
|
||||
/// season.title → Season title
|
||||
/// season.description → Season description
|
||||
/// season.number → Season number
|
||||
/// season.episodes → Number of episodes the season has
|
||||
///
|
||||
/// episode.id → Episode id
|
||||
/// episode.title → Episode title
|
||||
/// episode.description → Episode description
|
||||
/// episode.locale → Episode locale/language
|
||||
/// episode.number → Episode number
|
||||
/// episode.sequence_number → Episode number. This number is unique unlike `episode.number` which sometimes can be duplicated
|
||||
/// episode.duration → Episode duration in milliseconds
|
||||
/// episode.air_date → Episode air date as unix timestamp
|
||||
/// episode.premium_only → If the episode is only available with Crunchyroll premium
|
||||
///
|
||||
/// movie_listing.id → Movie listing id
|
||||
/// movie_listing.title → Movie listing title
|
||||
/// movie_listing.description → Movie listing description
|
||||
///
|
||||
/// movie.id → Movie id
|
||||
/// movie.title → Movie title
|
||||
/// movie.description → Movie description
|
||||
/// movie.duration → Movie duration in milliseconds
|
||||
/// movie.premium_only → If the movie is only available with Crunchyroll premium
|
||||
///
|
||||
/// music_video.id → Music video id
|
||||
/// music_video.title → Music video title
|
||||
/// music_video.description → Music video description
|
||||
/// music_video.duration → Music video duration in milliseconds
|
||||
/// music_video.premium_only → If the music video is only available with Crunchyroll premium
|
||||
///
|
||||
/// concert.id → Concert id
|
||||
/// concert.title → Concert title
|
||||
/// concert.description → Concert description
|
||||
/// concert.duration → Concert duration in milliseconds
|
||||
/// concert.premium_only → If the concert is only available with Crunchyroll premium
|
||||
///
|
||||
/// stream.locale → Stream locale/language
|
||||
/// stream.dash_url → Stream url in DASH format. You need to set the `Authorization` header to `Bearer <account.token>` when requesting this url
|
||||
/// stream.is_drm → If `stream.dash_url` is DRM encrypted
|
||||
///
|
||||
/// subtitle.locale → Subtitle locale/language
|
||||
/// subtitle.url → Url to the subtitle
|
||||
///
|
||||
/// account.token → Access token to make request to restricted endpoints. This token is only valid for a max. of 5 minutes
|
||||
/// account.id → Internal ID of the user account
|
||||
/// account.profile_name → Profile name of the account
|
||||
/// account.email → Email address of the account
|
||||
#[arg(short, long, verbatim_doc_comment)]
|
||||
#[arg(default_value = "S{{season.number}}E{{episode.number}} - {{episode.title}}")]
|
||||
output: String,
|
||||
|
||||
input: String,
|
||||
}
|
||||
|
||||
impl Execute for Search {
    /// Resolves the input (a Crunchyroll url or a free-text query) to media
    /// collections, then renders each one through the user-supplied output
    /// format template and prints the result.
    async fn execute(self, ctx: Context) -> Result<()> {
        if !ctx.crunchy.premium().await {
            warn!("Using `search` anonymously or with a non-premium account may return incomplete results")
        }

        if self.output.contains("{{stream.is_drm}}") {
            warn!("The `{{{{stream.is_drm}}}}` option is deprecated as it isn't reliable anymore and will be removed soon")
        }

        // url input → resolve directly (with url filter); otherwise run a
        // text query and flatten all result categories into one list
        let input = if crunchyroll_rs::parse::parse_url(&self.input).is_some() {
            match parse_url(&ctx.crunchy, self.input.clone(), true).await {
                Ok(ok) => vec![ok],
                Err(e) => bail!("url {} could not be parsed: {}", self.input, e),
            }
        } else {
            let mut output = vec![];

            // tuple order: (top results, series, movie listings, episodes, music videos)
            let query = resolve_query(&self, ctx.crunchy.query(&self.input)).await?;
            output.extend(query.0.into_iter().map(|m| (m, UrlFilter::default())));
            output.extend(
                query
                    .1
                    .into_iter()
                    .map(|s| (s.into(), UrlFilter::default())),
            );
            output.extend(
                query
                    .2
                    .into_iter()
                    .map(|m| (m.into(), UrlFilter::default())),
            );
            output.extend(
                query
                    .3
                    .into_iter()
                    .map(|e| (e.into(), UrlFilter::default())),
            );
            output.extend(
                query
                    .4
                    .into_iter()
                    .map(|m| (m.into(), UrlFilter::default())),
            );

            output
        };

        // session is shared with every Format instance
        let crunchy_arc = Arc::new(ctx.crunchy);
        for (media_collection, url_filter) in input {
            let filter_options = FilterOptions {
                audio: self.audio.clone(),
                url_filter,
            };

            let format = Format::new(self.output.clone(), filter_options, crunchy_arc.clone())?;
            println!("{}", format.parse(media_collection).await?);
        }

        Ok(())
    }
}
|
||||
|
||||
/// Drains up to `$limit` items from the paginated result stream `$item` into
/// `$vec`, propagating item errors with `?`. A limit of 0 skips the category
/// entirely.
macro_rules! resolve_query {
    ($limit:expr, $vec:expr, $item:expr) => {
        if $limit > 0 {
            let mut item_results = $item;
            while let Some(item) = item_results.next().await {
                $vec.push(item?);
                if $vec.len() >= $limit as usize {
                    break;
                }
            }
        }
    };
}
|
||||
|
||||
/// Collects query results per category, each capped by the corresponding
/// `--search-*-limit` flag.
///
/// Returns, in order: top results, series, movie listings, episodes and
/// music videos. Categories with a 0 limit come back empty.
async fn resolve_query(
    search: &Search,
    query_results: QueryResults,
) -> Result<(
    Vec<MediaCollection>,
    Vec<Series>,
    Vec<MovieListing>,
    Vec<Episode>,
    Vec<MusicVideo>,
)> {
    let mut media_collection = vec![];
    let mut series = vec![];
    let mut movie_listing = vec![];
    let mut episode = vec![];
    let mut music_video = vec![];

    resolve_query!(
        search.search_top_results_limit,
        media_collection,
        query_results.top_results
    );
    resolve_query!(search.search_series_limit, series, query_results.series);
    resolve_query!(
        search.search_movie_listing_limit,
        movie_listing,
        query_results.movie_listing
    );
    resolve_query!(search.search_episode_limit, episode, query_results.episode);
    resolve_query!(search.search_music_limit, music_video, query_results.music);

    Ok((
        media_collection,
        series,
        movie_listing,
        episode,
        music_video,
    ))
}
|
||||
47
crunchy-cli-core/src/search/filter.rs
Normal file
47
crunchy-cli-core/src/search/filter.rs
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
use crate::utils::parse::UrlFilter;
|
||||
use crunchyroll_rs::{Episode, Locale, MovieListing, Season, Series};
|
||||
|
||||
/// Filter settings applied to search results: requested audio locales plus
/// the season/episode restrictions parsed from the input url.
pub struct FilterOptions {
    pub audio: Vec<Locale>,
    pub url_filter: UrlFilter,
}

impl FilterOptions {
    /// Whether the series offers at least one of the requested audio locales.
    pub fn check_series(&self, series: &Series) -> bool {
        self.check_audio_language(&series.audio_locales)
    }

    /// Keeps only seasons that match the audio filter and the url's season
    /// restriction.
    pub fn filter_seasons(&self, mut seasons: Vec<Season>) -> Vec<Season> {
        seasons.retain(|s| {
            self.check_audio_language(&s.audio_locales)
                && self.url_filter.is_season_valid(s.season_number)
        });
        seasons
    }

    /// Keeps only episodes that match the audio filter and the url's
    /// season/episode restriction.
    pub fn filter_episodes(&self, mut episodes: Vec<Episode>) -> Vec<Episode> {
        episodes.retain(|e| {
            self.check_audio_language(&[e.audio_locale.clone()])
                && self
                    .url_filter
                    .is_episode_valid(e.sequence_number, e.season_number)
        });
        episodes
    }

    /// Whether the movie listing's (optional) audio locale matches the filter.
    pub fn check_movie_listing(&self, movie_listing: &MovieListing) -> bool {
        self.check_audio_language(
            &movie_listing
                .audio_locale
                .clone()
                .map_or(vec![], |a| vec![a.clone()]),
        )
    }

    /// True when any requested locale appears in `audio`; an empty filter
    /// matches everything.
    fn check_audio_language(&self, audio: &[Locale]) -> bool {
        if !self.audio.is_empty() {
            return self.audio.iter().any(|a| audio.contains(a));
        }
        true
    }
}
|
||||
687
crunchy-cli-core/src/search/format.rs
Normal file
687
crunchy-cli-core/src/search/format.rs
Normal file
|
|
@ -0,0 +1,687 @@
|
|||
use crate::search::filter::FilterOptions;
|
||||
use anyhow::{bail, Result};
|
||||
use crunchyroll_rs::media::{Stream, Subtitle};
|
||||
use crunchyroll_rs::{
|
||||
Concert, Crunchyroll, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo,
|
||||
Season, Series,
|
||||
};
|
||||
use regex::Regex;
|
||||
use serde::Serialize;
|
||||
use serde_json::{Map, Value};
|
||||
use std::collections::HashMap;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatSeries {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
pub release_year: u32,
|
||||
}
|
||||
|
||||
impl From<&Series> for FormatSeries {
|
||||
fn from(value: &Series) -> Self {
|
||||
Self {
|
||||
id: value.id.clone(),
|
||||
title: value.title.clone(),
|
||||
description: value.description.clone(),
|
||||
release_year: value.series_launch_year.unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatSeason {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
pub number: u32,
|
||||
pub episodes: u32,
|
||||
}
|
||||
|
||||
impl From<&Season> for FormatSeason {
|
||||
fn from(value: &Season) -> Self {
|
||||
Self {
|
||||
id: value.id.clone(),
|
||||
title: value.title.clone(),
|
||||
description: value.description.clone(),
|
||||
number: value.season_number,
|
||||
episodes: value.number_of_episodes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatEpisode {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
pub locale: Locale,
|
||||
pub number: u32,
|
||||
pub sequence_number: f32,
|
||||
pub duration: i64,
|
||||
pub air_date: i64,
|
||||
pub premium_only: bool,
|
||||
}
|
||||
|
||||
impl From<&Episode> for FormatEpisode {
|
||||
fn from(value: &Episode) -> Self {
|
||||
Self {
|
||||
id: value.id.clone(),
|
||||
title: value.title.clone(),
|
||||
description: value.description.clone(),
|
||||
locale: value.audio_locale.clone(),
|
||||
number: value.episode_number.unwrap_or_default(),
|
||||
sequence_number: value.sequence_number,
|
||||
duration: value.duration.num_milliseconds(),
|
||||
air_date: value.episode_air_date.timestamp(),
|
||||
premium_only: value.is_premium_only,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatMovieListing {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
impl From<&MovieListing> for FormatMovieListing {
|
||||
fn from(value: &MovieListing) -> Self {
|
||||
Self {
|
||||
id: value.id.clone(),
|
||||
title: value.title.clone(),
|
||||
description: value.description.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatMovie {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
pub duration: i64,
|
||||
pub premium_only: bool,
|
||||
}
|
||||
|
||||
impl From<&Movie> for FormatMovie {
|
||||
fn from(value: &Movie) -> Self {
|
||||
Self {
|
||||
id: value.id.clone(),
|
||||
title: value.title.clone(),
|
||||
description: value.description.clone(),
|
||||
duration: value.duration.num_milliseconds(),
|
||||
premium_only: value.is_premium_only,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatMusicVideo {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
pub duration: i64,
|
||||
pub premium_only: bool,
|
||||
}
|
||||
|
||||
impl From<&MusicVideo> for FormatMusicVideo {
|
||||
fn from(value: &MusicVideo) -> Self {
|
||||
Self {
|
||||
id: value.id.clone(),
|
||||
title: value.title.clone(),
|
||||
description: value.description.clone(),
|
||||
duration: value.duration.num_milliseconds(),
|
||||
premium_only: value.is_premium_only,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatConcert {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
pub duration: i64,
|
||||
pub premium_only: bool,
|
||||
}
|
||||
|
||||
impl From<&Concert> for FormatConcert {
|
||||
fn from(value: &Concert) -> Self {
|
||||
Self {
|
||||
id: value.id.clone(),
|
||||
title: value.title.clone(),
|
||||
description: value.description.clone(),
|
||||
duration: value.duration.num_milliseconds(),
|
||||
premium_only: value.is_premium_only,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatStream {
|
||||
pub locale: Locale,
|
||||
pub dash_url: String,
|
||||
pub is_drm: bool,
|
||||
}
|
||||
|
||||
impl From<&Stream> for FormatStream {
|
||||
fn from(value: &Stream) -> Self {
|
||||
Self {
|
||||
locale: value.audio_locale.clone(),
|
||||
dash_url: value.url.clone(),
|
||||
is_drm: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatSubtitle {
|
||||
pub locale: Locale,
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
impl From<&Subtitle> for FormatSubtitle {
|
||||
fn from(value: &Subtitle) -> Self {
|
||||
Self {
|
||||
locale: value.locale.clone(),
|
||||
url: value.url.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
struct FormatAccount {
|
||||
pub token: String,
|
||||
pub id: String,
|
||||
pub profile_name: String,
|
||||
pub email: String,
|
||||
}
|
||||
|
||||
impl FormatAccount {
|
||||
pub async fn async_from(value: &Crunchyroll) -> Result<Self> {
|
||||
let account = value.account().await?;
|
||||
|
||||
Ok(Self {
|
||||
token: value.access_token().await,
|
||||
id: account.account_id,
|
||||
profile_name: account.profile_name,
|
||||
email: account.email,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// All template scopes which may occur in a format string as
/// `{{ scope.field }}`.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
enum Scope {
    Series,
    Season,
    Episode,
    MovieListing,
    Movie,
    MusicVideo,
    Concert,
    Stream,
    Subtitle,
    Account,
}

/// Evaluates `$expr` (wrapped in `Some`) only when `$condition` is true and
/// `$media_collection` matches `$field`; returns `None` when the condition is
/// false. Panics if the condition holds but the variant does not match — the
/// callers are expected to have already dispatched on the variant.
macro_rules! must_match_if_true {
    ($condition:expr => $media_collection:ident | $field:pat => $expr:expr) => {
        if $condition {
            match &$media_collection {
                $field => Some($expr),
                _ => panic!(),
            }
        } else {
            None
        }
    };
}
|
||||
|
||||
/// Template renderer: resolves `{{ scope.field }}` placeholders in a user
/// supplied format string against fetched Crunchyroll media.
pub struct Format {
    /// Every placeholder occurrence: byte range in `input`, its scope and the
    /// referenced field name.
    pattern: Vec<(Range<usize>, Scope, String)>,
    /// How often each scope is referenced; used to skip fetching data for
    /// scopes that never occur.
    pattern_count: HashMap<Scope, u32>,
    /// The raw template string.
    input: String,
    /// Filters applied to media before rendering.
    filter_options: FilterOptions,
    /// Session used to fetch account details.
    crunchyroll: Arc<Crunchyroll>,
}
|
||||
|
||||
impl Format {
    /// Parses the template string `input` and validates every
    /// `{{ scope.field }}` occurrence against the serializable `Format*`
    /// structs. Errors if a scope or field name does not exist.
    pub fn new(
        input: String,
        filter_options: FilterOptions,
        crunchyroll: Arc<Crunchyroll>,
    ) -> Result<Self> {
        let scope_regex = Regex::new(r"(?m)\{\{\s*(?P<scope>\w+)\.(?P<field>\w+)\s*}}").unwrap();
        let mut pattern = vec![];
        let mut pattern_count = HashMap::new();

        // Builds a map of scope -> json object of the matching Format* default
        // value. The json keys are later used to verify that a referenced
        // field really exists on that scope.
        macro_rules! generate_field_check {
            ($($scope:expr => $struct_:ident)+) => {
                HashMap::from([
                    $(
                        (
                            $scope,
                            serde_json::from_value::<Map<String, Value>>(serde_json::to_value($struct_::default()).unwrap()).unwrap()
                        )
                    ),+
                ])
            };
        }
        let field_check = generate_field_check!(
            Scope::Series => FormatSeries
            Scope::Season => FormatSeason
            Scope::Episode => FormatEpisode
            Scope::MovieListing => FormatMovieListing
            Scope::Movie => FormatMovie
            Scope::MusicVideo => FormatMusicVideo
            Scope::Concert => FormatConcert
            Scope::Stream => FormatStream
            Scope::Subtitle => FormatSubtitle
            Scope::Account => FormatAccount
        );

        for capture in scope_regex.captures_iter(&input) {
            let full = capture.get(0).unwrap();
            let scope = capture.name("scope").unwrap().as_str();
            let field = capture.name("field").unwrap().as_str();

            let format_pattern_scope = match scope {
                "series" => Scope::Series,
                "season" => Scope::Season,
                "episode" => Scope::Episode,
                "movie_listing" => Scope::MovieListing,
                "movie" => Scope::Movie,
                "music_video" => Scope::MusicVideo,
                "concert" => Scope::Concert,
                "stream" => Scope::Stream,
                "subtitle" => Scope::Subtitle,
                "account" => Scope::Account,
                _ => bail!("'{}.{}' is not a valid keyword", scope, field),
            };

            // reject placeholders referencing a non-existing field
            if field_check
                .get(&format_pattern_scope)
                .unwrap()
                .get(field)
                .is_none()
            {
                bail!("'{}.{}' is not a valid keyword", scope, field)
            }

            pattern.push((
                full.start()..full.end(),
                format_pattern_scope.clone(),
                field.to_string(),
            ));
            *pattern_count.entry(format_pattern_scope).or_default() += 1
        }

        Ok(Self {
            pattern,
            pattern_count,
            input,
            filter_options,
            crunchyroll,
        })
    }

    /// Renders the template for the given media. Errors if the template
    /// references a scope which is not available for this media type (e.g.
    /// `movie` on a series).
    pub async fn parse(&self, media_collection: MediaCollection) -> Result<String> {
        match &media_collection {
            MediaCollection::Series(_)
            | MediaCollection::Season(_)
            | MediaCollection::Episode(_) => {
                self.check_scopes(vec![
                    Scope::Series,
                    Scope::Season,
                    Scope::Episode,
                    Scope::Stream,
                    Scope::Subtitle,
                    Scope::Account,
                ])?;

                self.parse_series(media_collection).await
            }
            MediaCollection::MovieListing(_) | MediaCollection::Movie(_) => {
                self.check_scopes(vec![
                    Scope::MovieListing,
                    Scope::Movie,
                    Scope::Stream,
                    Scope::Subtitle,
                    Scope::Account,
                ])?;

                self.parse_movie_listing(media_collection).await
            }
            MediaCollection::MusicVideo(_) => {
                self.check_scopes(vec![
                    Scope::MusicVideo,
                    Scope::Stream,
                    Scope::Subtitle,
                    Scope::Account,
                ])?;

                self.parse_music_video(media_collection).await
            }
            MediaCollection::Concert(_) => {
                self.check_scopes(vec![
                    Scope::Concert,
                    Scope::Stream,
                    Scope::Subtitle,
                    Scope::Account,
                ])?;

                self.parse_concert(media_collection).await
            }
        }
    }

    /// Renders series / season / episode media. Builds a
    /// season -> episodes -> streams tree, fetching only the levels the
    /// template actually references, and renders one output line per
    /// (season, episode, stream) combination.
    async fn parse_series(&self, media_collection: MediaCollection) -> Result<String> {
        let series_empty = self.check_pattern_count_empty(Scope::Series);
        let season_empty = self.check_pattern_count_empty(Scope::Season);
        let episode_empty = self.check_pattern_count_empty(Scope::Episode);
        let stream_empty = self.check_pattern_count_empty(Scope::Stream)
            && self.check_pattern_count_empty(Scope::Subtitle);
        let account_empty = self.check_pattern_count_empty(Scope::Account);

        #[allow(clippy::type_complexity)]
        let mut tree: Vec<(Season, Vec<(Episode, Vec<Stream>)>)> = vec![];

        // fetch the series only when the template references it
        let series = if !series_empty {
            let series = match &media_collection {
                MediaCollection::Series(series) => series.clone(),
                MediaCollection::Season(season) => season.series().await?,
                MediaCollection::Episode(episode) => episode.series().await?,
                _ => panic!(),
            };
            if !self.filter_options.check_series(&series) {
                return Ok("".to_string());
            }
            series
        } else {
            Series::default()
        };
        if !season_empty || !episode_empty || !stream_empty {
            let tmp_seasons = match &media_collection {
                MediaCollection::Series(series) => series.seasons().await?,
                MediaCollection::Season(season) => vec![season.clone()],
                MediaCollection::Episode(_) => vec![],
                _ => panic!(),
            };
            let mut seasons = vec![];
            for season in tmp_seasons {
                seasons.push(season.clone());
                // also collect alternate-audio versions of the season that
                // match the audio filter
                for version in season.versions {
                    if season.id == version.id {
                        continue;
                    }
                    if self.filter_options.audio.contains(&version.audio_locale) {
                        seasons.push(version.season().await?)
                    }
                }
            }
            tree.extend(
                self.filter_options
                    .filter_seasons(seasons)
                    .into_iter()
                    .map(|s| (s, vec![])),
            )
        } else {
            tree.push((Season::default(), vec![]))
        }
        if !episode_empty || !stream_empty {
            match &media_collection {
                MediaCollection::Episode(episode) => {
                    let mut episodes = vec![episode.clone()];
                    for version in &episode.versions {
                        if episode.id == version.id {
                            continue;
                        }
                        if self.filter_options.audio.contains(&version.audio_locale) {
                            episodes.push(version.episode().await?)
                        }
                    }
                    // NOTE(review): with an empty audio filter this retains
                    // nothing (unlike FilterOptions::check_audio_language,
                    // which passes everything) — verify callers always set at
                    // least one locale
                    tree.push((
                        Season::default(),
                        episodes
                            .into_iter()
                            .filter(|e| self.filter_options.audio.contains(&e.audio_locale))
                            .map(|e| (e, vec![]))
                            .collect(),
                    ))
                }
                _ => {
                    for (season, episodes) in tree.iter_mut() {
                        episodes.extend(
                            self.filter_options
                                .filter_episodes(season.episodes().await?)
                                .into_iter()
                                .map(|e| (e, vec![])),
                        )
                    }
                }
            };
        } else {
            for (_, episodes) in tree.iter_mut() {
                episodes.push((Episode::default(), vec![]))
            }
        }
        if !stream_empty {
            for (_, episodes) in tree.iter_mut() {
                for (episode, streams) in episodes {
                    let stream = episode.stream_maybe_without_drm().await?;
                    // presumably releases the active stream slot server-side
                    // before the next one is requested — TODO confirm
                    stream.clone().invalidate().await?;
                    streams.push(stream)
                }
            }
        } else {
            for (_, episodes) in tree.iter_mut() {
                for (_, streams) in episodes {
                    streams.push(Stream::default())
                }
            }
        }

        let mut output = vec![];
        let account_map = if !account_empty {
            self.serializable_to_json_map(FormatAccount::async_from(&self.crunchyroll).await?)
        } else {
            Map::default()
        };
        let series_map = self.serializable_to_json_map(FormatSeries::from(&series));
        for (season, episodes) in tree {
            let season_map = self.serializable_to_json_map(FormatSeason::from(&season));
            for (episode, streams) in episodes {
                let episode_map = self.serializable_to_json_map(FormatEpisode::from(&episode));
                for stream in streams {
                    let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));

                    output.push(
                        self.replace_all(
                            HashMap::from([
                                (Scope::Account, &account_map),
                                (Scope::Series, &series_map),
                                (Scope::Season, &season_map),
                                (Scope::Episode, &episode_map),
                                (Scope::Stream, &stream_map),
                            ]),
                            stream,
                        )
                        .unwrap_or_default(),
                    )
                }
            }
        }

        Ok(output.join("\n"))
    }

    /// Renders movie listing / movie media, analogous to [`Self::parse_series`]
    /// but with a flat movie -> streams tree.
    async fn parse_movie_listing(&self, media_collection: MediaCollection) -> Result<String> {
        let movie_listing_empty = self.check_pattern_count_empty(Scope::MovieListing);
        let movie_empty = self.check_pattern_count_empty(Scope::Movie);
        let stream_empty = self.check_pattern_count_empty(Scope::Stream);

        let mut tree: Vec<(Movie, Vec<Stream>)> = vec![];

        let movie_listing = if !movie_listing_empty {
            let movie_listing = match &media_collection {
                MediaCollection::MovieListing(movie_listing) => movie_listing.clone(),
                MediaCollection::Movie(movie) => movie.movie_listing().await?,
                _ => panic!(),
            };
            if !self.filter_options.check_movie_listing(&movie_listing) {
                return Ok("".to_string());
            }
            movie_listing
        } else {
            MovieListing::default()
        };
        if !movie_empty || !stream_empty {
            let movies = match &media_collection {
                MediaCollection::MovieListing(movie_listing) => movie_listing.movies().await?,
                MediaCollection::Movie(movie) => vec![movie.clone()],
                _ => panic!(),
            };
            tree.extend(movies.into_iter().map(|m| (m, vec![])))
        }
        if !stream_empty {
            for (movie, streams) in tree.iter_mut() {
                streams.push(movie.stream_maybe_without_drm().await?)
            }
        } else {
            for (_, streams) in tree.iter_mut() {
                streams.push(Stream::default())
            }
        }

        let mut output = vec![];
        let movie_listing_map =
            self.serializable_to_json_map(FormatMovieListing::from(&movie_listing));
        for (movie, streams) in tree {
            let movie_map = self.serializable_to_json_map(FormatMovie::from(&movie));
            for stream in streams {
                let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));

                output.push(
                    self.replace_all(
                        HashMap::from([
                            (Scope::MovieListing, &movie_listing_map),
                            (Scope::Movie, &movie_map),
                            (Scope::Stream, &stream_map),
                        ]),
                        stream,
                    )
                    .unwrap_or_default(),
                )
            }
        }

        Ok(output.join("\n"))
    }

    /// Renders a single music video.
    async fn parse_music_video(&self, media_collection: MediaCollection) -> Result<String> {
        let music_video_empty = self.check_pattern_count_empty(Scope::MusicVideo);
        let stream_empty = self.check_pattern_count_empty(Scope::Stream);

        let music_video = must_match_if_true!(!music_video_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.clone()).unwrap_or_default();
        let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.stream_maybe_without_drm().await?).unwrap_or_default();

        let music_video_map = self.serializable_to_json_map(FormatMusicVideo::from(&music_video));
        let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));

        let output = self
            .replace_all(
                HashMap::from([
                    (Scope::MusicVideo, &music_video_map),
                    (Scope::Stream, &stream_map),
                ]),
                stream,
            )
            .unwrap_or_default();
        Ok(output)
    }

    /// Renders a single concert.
    async fn parse_concert(&self, media_collection: MediaCollection) -> Result<String> {
        let concert_empty = self.check_pattern_count_empty(Scope::Concert);
        let stream_empty = self.check_pattern_count_empty(Scope::Stream);

        let concert = must_match_if_true!(!concert_empty => media_collection|MediaCollection::Concert(concert) => concert.clone()).unwrap_or_default();
        let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::Concert(concert) => concert.stream_maybe_without_drm().await?).unwrap_or_default();

        let concert_map = self.serializable_to_json_map(FormatConcert::from(&concert));
        let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));

        let output = self
            .replace_all(
                HashMap::from([(Scope::Concert, &concert_map), (Scope::Stream, &stream_map)]),
                stream,
            )
            .unwrap_or_default();
        Ok(output)
    }

    /// Converts any serializable struct into a json object (field name ->
    /// value) for placeholder lookup.
    fn serializable_to_json_map<S: Serialize>(&self, s: S) -> Map<String, Value> {
        serde_json::from_value(serde_json::to_value(s).unwrap()).unwrap()
    }

    /// `true` if the template never references the given scope.
    fn check_pattern_count_empty(&self, scope: Scope) -> bool {
        self.pattern_count.get(&scope).cloned().unwrap_or_default() == 0
    }

    /// Errors if the template references any scope outside `available_scopes`.
    fn check_scopes(&self, available_scopes: Vec<Scope>) -> Result<()> {
        for (_, scope, field) in self.pattern.iter() {
            if !available_scopes.contains(scope) {
                bail!(
                    "'{}.{}' is not a valid keyword",
                    format!("{:?}", scope).to_lowercase(),
                    field
                )
            }
        }
        Ok(())
    }

    /// Renders the template once per subtitle of `stream` (a dummy subtitle is
    /// injected when the stream has none and the template does not reference
    /// the subtitle scope). Returns `None` when subtitles are referenced but
    /// the stream has none.
    fn replace_all(
        &self,
        values: HashMap<Scope, &Map<String, Value>>,
        mut stream: Stream,
    ) -> Option<String> {
        if stream.subtitles.is_empty() {
            if !self.check_pattern_count_empty(Scope::Subtitle) {
                return None;
            }
            stream
                .subtitles
                .insert(Locale::Custom("".to_string()), Subtitle::default());
        }

        let mut output = vec![];
        for (_, subtitle) in stream.subtitles {
            let subtitle_map = self.serializable_to_json_map(FormatSubtitle::from(&subtitle));
            let mut tmp_values = values.clone();
            tmp_values.insert(Scope::Subtitle, &subtitle_map);
            output.push(self.replace(tmp_values))
        }

        Some(output.join("\n"))
    }

    /// Substitutes every placeholder in the template with its value, tracking
    /// the byte offset introduced by replacements of differing length.
    // NOTE(review): the offset math casts usize indices to i32, which would
    // truncate for templates larger than 2 GiB — theoretical only
    fn replace(&self, values: HashMap<Scope, &Map<String, Value>>) -> String {
        let mut output = self.input.clone();
        let mut offset = 0;
        for (range, scope, field) in &self.pattern {
            let item =
                serde_plain::to_string(values.get(scope).unwrap().get(field.as_str()).unwrap())
                    .unwrap();
            let start = (range.start as i32 + offset) as usize;
            let end = (range.end as i32 + offset) as usize;
            output.replace_range(start..end, &item);
            offset += item.len() as i32 - range.len() as i32;
        }

        output
    }
}
|
||||
5
crunchy-cli-core/src/search/mod.rs
Normal file
5
crunchy-cli-core/src/search/mod.rs
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
//! The `search` command: queries Crunchyroll and renders results through a
//! user supplied format string.
mod command;
mod filter;
mod format;

pub use command::Search;
|
||||
61
crunchy-cli-core/src/utils/clap.rs
Normal file
61
crunchy-cli-core/src/utils/clap.rs
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
use crate::utils::parse::parse_resolution;
|
||||
use crunchyroll_rs::media::Resolution;
|
||||
use regex::Regex;
|
||||
use reqwest::Proxy;
|
||||
|
||||
/// Clap value parser: converts a resolution string into a [`Resolution`],
/// delegating to [`parse_resolution`] and stringifying the error for clap.
pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> {
    parse_resolution(s.to_string()).map_err(|e| e.to_string())
}
|
||||
|
||||
/// Clap value parser for proxy settings. Returns a tuple of two optional
/// proxies; which positions are populated depends on the input form:
/// `a:b` → both (split at the middle colon), `:b` → second only,
/// `a:` → first only, `a` → the same proxy in both positions.
pub fn clap_parse_proxies(s: &str) -> Result<(Option<Proxy>, Option<Proxy>), String> {
    let double_proxy_regex =
        Regex::new(r"^(?P<first>(https?|socks5h?)://.+):(?P<second>(https?|socks5h?)://.+)$")
            .unwrap();

    if let Some(capture) = double_proxy_regex.captures(s) {
        // checks if the input is formatted like 'https://example.com:socks5://examples.com' and
        // splits the string into 2 separate proxies at the middle colon

        let first = capture.name("first").unwrap().as_str();
        let second = capture.name("second").unwrap().as_str();
        Ok((
            Some(Proxy::all(first).map_err(|e| format!("first proxy: {e}"))?),
            Some(Proxy::all(second).map_err(|e| format!("second proxy: {e}"))?),
        ))
    } else if s.starts_with(':') {
        // checks if the input is formatted like ':https://example.com' and returns a proxy on the
        // second tuple position
        Ok((
            None,
            Some(Proxy::all(s.trim_start_matches(':')).map_err(|e| e.to_string())?),
        ))
    } else if s.ends_with(':') {
        // checks if the input is formatted like 'https://example.com:' and returns a proxy on the
        // first tuple position
        Ok((
            Some(Proxy::all(s.trim_end_matches(':')).map_err(|e| e.to_string())?),
            None,
        ))
    } else {
        // returns the same proxy for both tuple positions
        let proxy = Proxy::all(s).map_err(|e| e.to_string())?;
        Ok((Some(proxy.clone()), Some(proxy)))
    }
}
|
||||
|
||||
/// Clap value parser for a download speed limit. Accepts a plain number of
/// bytes or a number with a case-insensitive `b`, `kb` or `mb` suffix and
/// returns the limit in bytes.
///
/// Returns an error for unparseable input or when the result would overflow
/// `u32` (the original `kb * 1024` / `mb * 1024 * 1024` silently wrapped in
/// release builds and panicked in debug builds).
pub fn clap_parse_speed_limit(s: &str) -> Result<u32, String> {
    let quota = s.to_lowercase();

    let bytes = if let Ok(b) = quota.parse() {
        b
    } else if let Ok(b) = quota.trim_end_matches('b').parse::<u32>() {
        b
    } else if let Ok(kb) = quota.trim_end_matches("kb").parse::<u32>() {
        kb.checked_mul(1024)
            .ok_or_else(|| "Invalid speed limit (too large)".to_string())?
    } else if let Ok(mb) = quota.trim_end_matches("mb").parse::<u32>() {
        mb.checked_mul(1024 * 1024)
            .ok_or_else(|| "Invalid speed limit (too large)".to_string())?
    } else {
        return Err("Invalid speed limit".to_string());
    };
    Ok(bytes)
}
|
||||
9
crunchy-cli-core/src/utils/context.rs
Normal file
9
crunchy-cli-core/src/utils/context.rs
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
use crate::utils::rate_limit::RateLimiterService;
use crunchyroll_rs::Crunchyroll;
use reqwest::Client;

/// Shared state handed to command implementations.
pub struct Context {
    /// The Crunchyroll session.
    pub crunchy: Crunchyroll,
    /// Plain http client for requests outside the Crunchyroll session.
    pub client: Client,
    /// Optional download rate limiter (`None` = unlimited).
    pub rate_limiter: Option<RateLimiterService>,
}
|
||||
1453
crunchy-cli-core/src/utils/download.rs
Normal file
1453
crunchy-cli-core/src/utils/download.rs
Normal file
File diff suppressed because it is too large
Load diff
381
crunchy-cli-core/src/utils/ffmpeg.rs
Normal file
381
crunchy-cli-core/src/utils/ffmpeg.rs
Normal file
|
|
@ -0,0 +1,381 @@
|
|||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use std::fmt;
|
||||
use std::fmt::Formatter;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Containers which support soft subtitles (subtitle tracks muxed into the
/// file instead of burned in).
pub const SOFTSUB_CONTAINERS: [&str; 3] = ["mkv", "mov", "mp4"];

/// An ffmpeg encoding preset: either a predefined codec / hardware
/// acceleration / quality combination or a raw custom argument string.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum FFmpegPreset {
    Predefined(FFmpegCodec, Option<FFmpegHwAccel>, FFmpegQuality),
    Custom(Option<String>),
}

lazy_static! {
    /// Matches strings that look like a predefined preset name (dash separated
    /// words); anything else is treated as custom ffmpeg arguments.
    static ref PREDEFINED_PRESET: Regex = Regex::new(r"^\w+(-\w+)*?$").unwrap();
}
|
||||
|
||||
/// Generates a simple ffmpeg option enum with:
/// - an `all()` constructor listing every variant,
/// - a lowercase `Display` impl, and
/// - a `FromStr` impl matching the exact variant name.
macro_rules! ffmpeg_enum {
    (enum $name:ident { $($field:ident),* }) => {
        #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
        pub enum $name {
            $(
                $field
            ),*,
        }

        impl $name {
            fn all() -> Vec<$name> {
                vec![
                    $(
                        $name::$field
                    ),*,
                ]
            }
        }

        impl fmt::Display for $name {
            fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
                match self {
                    $(
                        &$name::$field => write!(f, "{}", stringify!($field).to_string().to_lowercase())
                    ),*
                }
            }
        }

        impl FromStr for $name {
            type Err = anyhow::Error;

            // NOTE(review): matches the case-sensitive variant name (e.g.
            // "H264"), while Display emits lowercase — callers appear to
            // lowercase-compare via Display instead of using FromStr
            fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
                match s {
                    $(
                        stringify!($field) => Ok($name::$field)
                    ),*,
                    _ => anyhow::bail!("{} is not a valid {}", s, stringify!($name).to_lowercase())
                }
            }
        }
    }
}
|
||||
|
||||
// Video codecs selectable in a predefined preset.
ffmpeg_enum! {
    enum FFmpegCodec {
        H264,
        H265,
        Av1
    }
}

// Hardware acceleration vendors selectable in a predefined preset.
ffmpeg_enum! {
    enum FFmpegHwAccel {
        Nvidia,
        Amd,
        Apple
    }
}

// Quality / compression levels selectable in a predefined preset.
ffmpeg_enum! {
    enum FFmpegQuality {
        Lossless,
        Normal,
        Low
    }
}

impl Default for FFmpegPreset {
    /// Default preset: stream-copy video and audio (no re-encoding).
    fn default() -> Self {
        Self::Custom(Some("-c:v copy -c:a copy".to_string()))
    }
}
|
||||
|
||||
impl FFmpegPreset {
|
||||
    /// Returns every supported (codec, hwaccel, quality) combination.
    ///
    /// For each codec the cartesian product of its supported hardware
    /// accelerations and qualities is emitted, including the variants where
    /// hwaccel and/or quality are omitted (`None`).
    pub(crate) fn available_matches(
    ) -> Vec<(FFmpegCodec, Option<FFmpegHwAccel>, Option<FFmpegQuality>)> {
        // per-codec support matrix; Av1 only lists Amd hardware acceleration
        let codecs = vec![
            (
                FFmpegCodec::H264,
                FFmpegHwAccel::all(),
                FFmpegQuality::all(),
            ),
            (
                FFmpegCodec::H265,
                FFmpegHwAccel::all(),
                FFmpegQuality::all(),
            ),
            (
                FFmpegCodec::Av1,
                vec![FFmpegHwAccel::Amd],
                FFmpegQuality::all(),
            ),
        ];

        let mut return_values = vec![];

        for (codec, hwaccels, qualities) in codecs {
            // codec alone
            return_values.push((codec.clone(), None, None));
            // codec + hwaccel
            for hwaccel in hwaccels.clone() {
                return_values.push((codec.clone(), Some(hwaccel), None));
            }
            // codec + quality
            for quality in qualities.clone() {
                return_values.push((codec.clone(), None, Some(quality)))
            }
            // codec + hwaccel + quality
            for hwaccel in hwaccels {
                for quality in qualities.clone() {
                    return_values.push((codec.clone(), Some(hwaccel.clone()), Some(quality)))
                }
            }
        }

        return_values
    }
|
||||
|
||||
    /// Renders [`Self::available_matches`] as human readable strings of the
    /// form `"h264-nvidia-low (h264 encoded with ...)"` for help output.
    pub(crate) fn available_matches_human_readable() -> Vec<String> {
        let mut return_values = vec![];

        for (codec, hwaccel, quality) in FFmpegPreset::available_matches() {
            let mut description_details = vec![];
            if let Some(h) = &hwaccel {
                description_details.push(format!("{h} hardware acceleration"))
            }
            if let Some(q) = &quality {
                description_details.push(format!("{q} video quality/compression"))
            }

            // join the detail fragments grammatically: "a", "a and b",
            // "a, b and c"
            let description = if description_details.is_empty() {
                format!("{codec} encoded with default video quality/compression",)
            } else if description_details.len() == 1 {
                format!("{} encoded with {}", codec, description_details[0])
            } else {
                let first = description_details.remove(0);
                let last = description_details.remove(description_details.len() - 1);
                let mid = if !description_details.is_empty() {
                    format!(", {} ", description_details.join(", "))
                } else {
                    "".to_string()
                };

                format!("{codec} encoded with {first}{mid} and {last}",)
            };

            // preset name is the dash-joined lowercase parts, e.g. "h265-apple-low"
            return_values.push(format!(
                "{} ({})",
                vec![
                    Some(codec.to_string()),
                    hwaccel.map(|h| h.to_string()),
                    quality.map(|q| q.to_string())
                ]
                .into_iter()
                .flatten()
                .collect::<Vec<String>>()
                .join("-"),
                description
            ))
        }
        return_values
    }
|
||||
|
||||
pub(crate) fn parse(s: &str) -> Result<FFmpegPreset, String> {
|
||||
if !PREDEFINED_PRESET.is_match(s) {
|
||||
return Ok(FFmpegPreset::Custom(Some(s.to_string())));
|
||||
}
|
||||
|
||||
let mut codec: Option<FFmpegCodec> = None;
|
||||
let mut hwaccel: Option<FFmpegHwAccel> = None;
|
||||
let mut quality: Option<FFmpegQuality> = None;
|
||||
for token in s.split('-') {
|
||||
if let Some(c) = FFmpegCodec::all()
|
||||
.into_iter()
|
||||
.find(|p| p.to_string() == token.to_lowercase())
|
||||
{
|
||||
if let Some(cc) = codec {
|
||||
return Err(format!("cannot use multiple codecs (found {cc} and {c})",));
|
||||
}
|
||||
codec = Some(c)
|
||||
} else if let Some(h) = FFmpegHwAccel::all()
|
||||
.into_iter()
|
||||
.find(|p| p.to_string() == token.to_lowercase())
|
||||
{
|
||||
if let Some(hh) = hwaccel {
|
||||
return Err(format!(
|
||||
"cannot use multiple hardware accelerations (found {hh} and {h})",
|
||||
));
|
||||
}
|
||||
hwaccel = Some(h)
|
||||
} else if let Some(q) = FFmpegQuality::all()
|
||||
.into_iter()
|
||||
.find(|p| p.to_string() == token.to_lowercase())
|
||||
{
|
||||
if let Some(qq) = quality {
|
||||
return Err(format!(
|
||||
"cannot use multiple ffmpeg preset qualities (found {qq} and {q})",
|
||||
));
|
||||
}
|
||||
quality = Some(q)
|
||||
} else {
|
||||
return Err(format!(
|
||||
"'{}' is not a valid ffmpeg preset (unknown token '{}')",
|
||||
s, token
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(c) = codec {
|
||||
if !FFmpegPreset::available_matches().contains(&(
|
||||
c.clone(),
|
||||
hwaccel.clone(),
|
||||
quality.clone(),
|
||||
)) {
|
||||
return Err("ffmpeg preset is not supported".to_string());
|
||||
}
|
||||
Ok(FFmpegPreset::Predefined(
|
||||
c,
|
||||
hwaccel,
|
||||
quality.unwrap_or(FFmpegQuality::Normal),
|
||||
))
|
||||
} else {
|
||||
Err("cannot use ffmpeg preset with without a codec".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Consumes the preset and converts it into the ffmpeg cli arguments it
/// represents, returned as `(input_args, output_args)`: flags that must be
/// placed before `-i <input>` vs. flags placed before the output file.
pub(crate) fn into_input_output_args(self) -> (Vec<String>, Vec<String>) {
    match self {
        // custom presets carry a raw user supplied argument string which is
        // only split shell-like into tokens and passed through as output args
        FFmpegPreset::Custom(output) => (
            vec![],
            output.map_or(vec![], |o| shlex::split(&o).unwrap_or_default()),
        ),
        FFmpegPreset::Predefined(codec, hwaccel_opt, quality) => {
            let mut input = vec![];
            let mut output = vec![];

            match codec {
                FFmpegCodec::H264 => {
                    // maps the generic quality setting onto x264-style crf
                    // values; a closure because it must run before the
                    // encoder args are appended
                    let mut crf_quality = || match quality {
                        FFmpegQuality::Lossless => output.extend(["-crf", "18"]),
                        FFmpegQuality::Normal => (),
                        FFmpegQuality::Low => output.extend(["-crf", "35"]),
                    };

                    if let Some(hwaccel) = hwaccel_opt {
                        match hwaccel {
                            FFmpegHwAccel::Nvidia => {
                                // hardware decode (cuvid) + hardware encode (nvenc)
                                input.extend([
                                    "-hwaccel",
                                    "cuda",
                                    "-hwaccel_output_format",
                                    "cuda",
                                    "-c:v",
                                    "h264_cuvid",
                                ]);
                                crf_quality();
                                output.extend(["-c:v", "h264_nvenc", "-c:a", "copy"])
                            }
                            FFmpegHwAccel::Amd => {
                                crf_quality();
                                output.extend(["-c:v", "h264_amf", "-c:a", "copy"])
                            }
                            FFmpegHwAccel::Apple => {
                                // Apple's Video Toolbox encoders ignore `-crf`, use `-q:v`
                                // instead. It's on a scale of 1-100, 100 being lossless. Just
                                // did some math ((-a/51+1)*99+1 where `a` is the old crf value)
                                // so these settings very likely need some more tweaking
                                match quality {
                                    FFmpegQuality::Lossless => output.extend(["-q:v", "65"]),
                                    FFmpegQuality::Normal => (),
                                    FFmpegQuality::Low => output.extend(["-q:v", "32"]),
                                }

                                output.extend(["-c:v", "h264_videotoolbox", "-c:a", "copy"])
                            }
                        }
                    } else {
                        // software fallback
                        crf_quality();
                        output.extend(["-c:v", "libx264", "-c:a", "copy"])
                    }
                }
                FFmpegCodec::H265 => {
                    let mut crf_quality = || match quality {
                        FFmpegQuality::Lossless => output.extend(["-crf", "20"]),
                        FFmpegQuality::Normal => (),
                        FFmpegQuality::Low => output.extend(["-crf", "35"]),
                    };

                    if let Some(hwaccel) = hwaccel_opt {
                        match hwaccel {
                            FFmpegHwAccel::Nvidia => {
                                // NOTE(review): the hardware *decoder* is `h264_cuvid`
                                // even though the target codec is H.265 — presumably
                                // because the downloaded source stream is H.264;
                                // confirm before changing
                                input.extend([
                                    "-hwaccel",
                                    "cuda",
                                    "-hwaccel_output_format",
                                    "cuda",
                                    "-c:v",
                                    "h264_cuvid",
                                ]);
                                crf_quality();
                                // `-tag:v hvc1` marks the hevc track for better
                                // compatibility with some players
                                output.extend([
                                    "-c:v",
                                    "hevc_nvenc",
                                    "-c:a",
                                    "copy",
                                    "-tag:v",
                                    "hvc1",
                                ])
                            }
                            FFmpegHwAccel::Amd => {
                                crf_quality();
                                output.extend(["-c:v", "hevc_amf", "-c:a", "copy"])
                            }
                            FFmpegHwAccel::Apple => {
                                // See the comment for apple h264 hwaccel
                                match quality {
                                    FFmpegQuality::Lossless => output.extend(["-q:v", "61"]),
                                    FFmpegQuality::Normal => (),
                                    FFmpegQuality::Low => output.extend(["-q:v", "32"]),
                                }

                                output.extend([
                                    "-c:v",
                                    "hevc_videotoolbox",
                                    "-c:a",
                                    "copy",
                                    "-tag:v",
                                    "hvc1",
                                ])
                            }
                        }
                    } else {
                        // software fallback
                        crf_quality();
                        output.extend(["-c:v", "libx265", "-c:a", "copy", "-tag:v", "hvc1"])
                    }
                }
                FFmpegCodec::Av1 => {
                    let mut crf_quality = || match quality {
                        FFmpegQuality::Lossless => output.extend(["-crf", "22"]),
                        FFmpegQuality::Normal => (),
                        FFmpegQuality::Low => output.extend(["-crf", "35"]),
                    };

                    // av1 only has an AMD hardware encoder path here; every
                    // other case uses the SVT-AV1 software encoder
                    crf_quality();
                    if let Some(FFmpegHwAccel::Amd) = hwaccel_opt {
                        output.extend(["-c:v", "av1_amf", "-c:a", "copy"]);
                    } else {
                        output.extend(["-c:v", "libsvtav1", "-c:a", "copy"]);
                    }
                }
            }

            // the args were collected as &str for readability; convert once
            (
                input
                    .into_iter()
                    .map(|s| s.to_string())
                    .collect::<Vec<String>>(),
                output
                    .into_iter()
                    .map(|s| s.to_string())
                    .collect::<Vec<String>>(),
            )
        }
    }
}
|
||||
}
|
||||
467
crunchy-cli-core/src/utils/filter.rs
Normal file
467
crunchy-cli-core/src/utils/filter.rs
Normal file
|
|
@ -0,0 +1,467 @@
|
|||
use crate::utils::format::{SingleFormat, SingleFormatCollection};
|
||||
use crate::utils::interactive_select::{check_for_duplicated_seasons, get_duplicated_seasons};
|
||||
use crate::utils::parse::{fract, UrlFilter};
|
||||
use anyhow::Result;
|
||||
use crunchyroll_rs::{
|
||||
Concert, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo, Season, Series,
|
||||
};
|
||||
use log::{info, warn};
|
||||
use std::collections::{BTreeMap, HashMap};
|
||||
use std::ops::Not;
|
||||
|
||||
/// The scope a [`Filter`] "missing locale" callback refers to.
pub(crate) enum FilterMediaScope<'a> {
    /// Locales are missing for a whole series.
    Series(&'a Series),
    /// Locales are missing for a whole season.
    Season(&'a Season),
    /// Always contains 1 or 2 episodes.
    /// - 1: The episode's audio is completely missing
    /// - 2: The requested audio is only available from first entry to last entry
    Episode(Vec<&'a Episode>),
}
|
||||
|
||||
/// Walks a media tree (series → seasons → episodes, movie listings → movies,
/// …) and collects everything matching the user's request into a
/// [`SingleFormatCollection`].
pub(crate) struct Filter {
    // season/episode ranges parsed from the url fragment
    url_filter: UrlFilter,

    // drop specials (sequence number 0 or fractional)
    skip_specials: bool,
    interactive_input: bool,

    // whether relative episode numbers must be computed (expensive)
    relative_episode_number: bool,

    audio_locales: Vec<Locale>,
    subtitle_locales: Vec<Locale>,

    // callbacks invoked when requested locales are unavailable; returning
    // Ok(false) aborts processing of the affected scope
    audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
    subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
    // invoked when premium-only episodes of a season get skipped
    no_premium: fn(u32) -> Result<()>,

    is_premium: bool,

    // set after `visit_series` ran; changes how single episodes are handled
    series_visited: bool,
    // cache of all episodes per season id (for relative numbering)
    season_episodes: HashMap<String, Vec<Episode>>,
    // Some(..) iff the account is not premium; season numbers that already
    // triggered the `no_premium` callback
    season_with_premium: Option<Vec<u32>>,
    // season ids in first-visited order (stable output ordering in `finish`)
    season_sorting: Vec<String>,
}
|
||||
|
||||
impl Filter {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) fn new(
|
||||
url_filter: UrlFilter,
|
||||
audio_locales: Vec<Locale>,
|
||||
subtitle_locales: Vec<Locale>,
|
||||
audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
|
||||
subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
|
||||
no_premium: fn(u32) -> Result<()>,
|
||||
relative_episode_number: bool,
|
||||
interactive_input: bool,
|
||||
skip_specials: bool,
|
||||
is_premium: bool,
|
||||
) -> Self {
|
||||
Self {
|
||||
url_filter,
|
||||
audio_locales,
|
||||
subtitle_locales,
|
||||
relative_episode_number,
|
||||
interactive_input,
|
||||
audios_missing,
|
||||
subtitles_missing,
|
||||
no_premium,
|
||||
is_premium,
|
||||
series_visited: false,
|
||||
season_episodes: HashMap::new(),
|
||||
skip_specials,
|
||||
season_with_premium: is_premium.not().then_some(vec![]),
|
||||
season_sorting: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Expands a [`Series`] into its seasons, dropping the whole series early
/// when requested audio/subtitle locales are missing and the corresponding
/// callback vetoes continuing (returns `false`).
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
    // the audio locales field isn't always populated
    if !series.audio_locales.is_empty() {
        let missing_audios = missing_locales(&series.audio_locales, &self.audio_locales);
        if !missing_audios.is_empty()
            && !(self.audios_missing)(FilterMediaScope::Series(&series), missing_audios)?
        {
            return Ok(vec![]);
        }
        let missing_subtitles =
            missing_locales(&series.subtitle_locales, &self.subtitle_locales);
        if !missing_subtitles.is_empty()
            && !(self.subtitles_missing)(FilterMediaScope::Series(&series), missing_subtitles)?
        {
            return Ok(vec![]);
        }
    }

    let mut seasons = vec![];
    for season in series.seasons().await? {
        if !self.url_filter.is_season_valid(season.season_number) {
            continue;
        }
        // the season's available audios are derived from its versions
        let missing_audios = missing_locales(
            &season
                .versions
                .iter()
                .map(|l| l.audio_locale.clone())
                .collect::<Vec<Locale>>(),
            &self.audio_locales,
        );
        if !missing_audios.is_empty()
            && !(self.audios_missing)(FilterMediaScope::Season(&season), missing_audios)?
        {
            return Ok(vec![]);
        }
        seasons.push(season)
    }

    // multiple seasons may share the same season number; either let the user
    // pick interactively or just report them
    let duplicated_seasons = get_duplicated_seasons(&seasons);
    if !duplicated_seasons.is_empty() {
        if self.interactive_input {
            check_for_duplicated_seasons(&mut seasons)
        } else {
            info!(
                "Found duplicated seasons: {}",
                duplicated_seasons
                    .iter()
                    .map(|d| d.to_string())
                    .collect::<Vec<String>>()
                    .join(", ")
            )
        }
    }

    self.series_visited = true;

    Ok(seasons)
}
|
||||
|
||||
/// Expands a [`Season`] into its episodes, including the versions of the
/// season in every other requested audio locale.
async fn visit_season(&mut self, season: Season) -> Result<Vec<Episode>> {
    if !self.url_filter.is_season_valid(season.season_number) {
        return Ok(vec![]);
    }

    // collect the season itself plus every version of it whose audio locale
    // was requested
    let mut seasons = vec![];
    if self
        .audio_locales
        .iter()
        .any(|l| season.audio_locales.contains(l))
    {
        seasons.push(season.clone())
    }
    for version in season.versions {
        // the season is listed as a version of itself; already handled above
        if season.id == version.id {
            continue;
        }
        if self.audio_locales.contains(&version.audio_locale) {
            seasons.push(version.season().await?)
        }
    }

    let mut episodes = vec![];
    for season in seasons {
        // remember the visiting order for the final sort in `finish`
        self.season_sorting.push(season.id.clone());
        let mut eps = season.episodes().await?;

        // removes any episode that does not have the audio locale of the season. yes, this is
        // the case sometimes
        if season.audio_locales.len() < 2 {
            // fall back to Japanese when the season reports no audio at all
            let season_locale = season
                .audio_locales
                .first()
                .cloned()
                .unwrap_or(Locale::ja_JP);
            eps.retain(|e| e.audio_locale == season_locale)
        }

        // fewer episodes than announced → treat the gap as missing audio and
        // let the callback decide whether to continue
        #[allow(clippy::if_same_then_else)]
        if eps.len() < season.number_of_episodes as usize {
            if eps.is_empty()
                && !(self.audios_missing)(
                    FilterMediaScope::Season(&season),
                    season.audio_locales.iter().collect(),
                )?
            {
                return Ok(vec![]);
            } else if !eps.is_empty()
                && !(self.audios_missing)(
                    FilterMediaScope::Episode(vec![eps.first().unwrap(), eps.last().unwrap()]),
                    vec![&eps.first().unwrap().audio_locale],
                )?
            {
                return Ok(vec![]);
            }
        }

        episodes.extend(eps)
    }

    // pre-populate the relative-number cache so `visit_episode` does not have
    // to re-fetch the episodes of each season
    if self.relative_episode_number {
        for episode in &episodes {
            self.season_episodes
                .entry(episode.season_id.clone())
                .or_default()
                .push(episode.clone())
        }
    }

    Ok(episodes)
}
|
||||
|
||||
async fn visit_episode(&mut self, episode: Episode) -> Result<Vec<SingleFormat>> {
|
||||
if !self
|
||||
.url_filter
|
||||
.is_episode_valid(episode.sequence_number, episode.season_number)
|
||||
{
|
||||
return Ok(vec![]);
|
||||
}
|
||||
|
||||
// skip the episode if it's a special
|
||||
if self.skip_specials
|
||||
&& (episode.sequence_number == 0.0 || episode.sequence_number.fract() != 0.0)
|
||||
{
|
||||
return Ok(vec![]);
|
||||
}
|
||||
|
||||
let mut episodes = vec![];
|
||||
if !self.series_visited {
|
||||
if self.audio_locales.contains(&episode.audio_locale) {
|
||||
episodes.push(episode.clone())
|
||||
}
|
||||
for version in &episode.versions {
|
||||
// `episode` is also a version of itself. the if block above already adds the
|
||||
// episode if it matches the requested audio, so it doesn't need to be requested
|
||||
// here again
|
||||
if version.id == episode.id {
|
||||
continue;
|
||||
}
|
||||
if self.audio_locales.contains(&version.audio_locale) {
|
||||
episodes.push(version.episode().await?)
|
||||
}
|
||||
}
|
||||
|
||||
let audio_locales: Vec<Locale> =
|
||||
episodes.iter().map(|e| e.audio_locale.clone()).collect();
|
||||
let missing_audios = missing_locales(&audio_locales, &self.audio_locales);
|
||||
if !missing_audios.is_empty()
|
||||
&& !(self.audios_missing)(
|
||||
FilterMediaScope::Episode(vec![&episode]),
|
||||
missing_audios,
|
||||
)?
|
||||
{
|
||||
return Ok(vec![]);
|
||||
}
|
||||
|
||||
let mut subtitle_locales: Vec<Locale> = episodes
|
||||
.iter()
|
||||
.flat_map(|e| e.subtitle_locales.clone())
|
||||
.collect();
|
||||
subtitle_locales.sort();
|
||||
subtitle_locales.dedup();
|
||||
let missing_subtitles = missing_locales(&subtitle_locales, &self.subtitle_locales);
|
||||
if !missing_subtitles.is_empty()
|
||||
&& !(self.subtitles_missing)(
|
||||
FilterMediaScope::Episode(vec![&episode]),
|
||||
missing_subtitles,
|
||||
)?
|
||||
{
|
||||
return Ok(vec![]);
|
||||
}
|
||||
} else {
|
||||
episodes.push(episode.clone())
|
||||
}
|
||||
|
||||
if let Some(seasons_with_premium) = &mut self.season_with_premium {
|
||||
let episodes_len_before = episodes.len();
|
||||
episodes.retain(|e| !e.is_premium_only && !self.is_premium);
|
||||
if episodes_len_before < episodes.len()
|
||||
&& !seasons_with_premium.contains(&episode.season_number)
|
||||
{
|
||||
(self.no_premium)(episode.season_number)?;
|
||||
seasons_with_premium.push(episode.season_number)
|
||||
}
|
||||
|
||||
if episodes.is_empty() {
|
||||
return Ok(vec![]);
|
||||
}
|
||||
}
|
||||
|
||||
let mut relative_episode_number = None;
|
||||
let mut relative_sequence_number = None;
|
||||
if self.relative_episode_number {
|
||||
let season_eps = match self.season_episodes.get(&episode.season_id) {
|
||||
Some(eps) => eps,
|
||||
None => {
|
||||
self.season_episodes.insert(
|
||||
episode.season_id.clone(),
|
||||
episode.season().await?.episodes().await?,
|
||||
);
|
||||
self.season_episodes.get(&episode.season_id).unwrap()
|
||||
}
|
||||
};
|
||||
let mut non_integer_sequence_number_count = 0;
|
||||
for (i, ep) in season_eps.iter().enumerate() {
|
||||
if ep.sequence_number != 0.0 || ep.sequence_number.fract() == 0.0 {
|
||||
non_integer_sequence_number_count += 1
|
||||
}
|
||||
if ep.id == episode.id {
|
||||
relative_episode_number = Some(i + 1);
|
||||
relative_sequence_number = Some(
|
||||
(i + 1 - non_integer_sequence_number_count) as f32
|
||||
+ fract(ep.sequence_number),
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if relative_episode_number.is_none() || relative_sequence_number.is_none() {
|
||||
warn!(
|
||||
"Failed to get relative episode number for episode {} ({}) of {} season {}",
|
||||
episode.sequence_number,
|
||||
episode.title,
|
||||
episode.series_title,
|
||||
episode.season_number,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(episodes
|
||||
.into_iter()
|
||||
.map(|e| {
|
||||
SingleFormat::new_from_episode(
|
||||
e.clone(),
|
||||
e.subtitle_locales,
|
||||
relative_episode_number.map(|n| n as u32),
|
||||
relative_sequence_number,
|
||||
)
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
/// Expands a [`MovieListing`] into the movies it contains.
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
    let movies = movie_listing.movies().await?;
    Ok(movies)
}
|
||||
|
||||
async fn visit_movie(&mut self, movie: Movie) -> Result<Vec<SingleFormat>> {
|
||||
Ok(vec![SingleFormat::new_from_movie(movie, vec![])])
|
||||
}
|
||||
|
||||
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Vec<SingleFormat>> {
|
||||
Ok(vec![SingleFormat::new_from_music_video(music_video)])
|
||||
}
|
||||
|
||||
async fn visit_concert(&mut self, concert: Concert) -> Result<Vec<SingleFormat>> {
|
||||
Ok(vec![SingleFormat::new_from_concert(concert)])
|
||||
}
|
||||
|
||||
async fn finish(self, input: Vec<Vec<SingleFormat>>) -> Result<SingleFormatCollection> {
|
||||
let flatten_input: Vec<SingleFormat> = input.into_iter().flatten().collect();
|
||||
|
||||
let mut single_format_collection = SingleFormatCollection::new();
|
||||
|
||||
let mut pre_sorted: BTreeMap<String, Vec<SingleFormat>> = BTreeMap::new();
|
||||
for data in flatten_input {
|
||||
pre_sorted
|
||||
.entry(data.identifier.clone())
|
||||
.or_default()
|
||||
.push(data)
|
||||
}
|
||||
|
||||
let mut sorted: Vec<(String, Vec<SingleFormat>)> = pre_sorted.into_iter().collect();
|
||||
sorted.sort_by(|(_, a), (_, b)| {
|
||||
self.season_sorting
|
||||
.iter()
|
||||
.position(|p| p == &a.first().unwrap().season_id)
|
||||
.unwrap()
|
||||
.cmp(
|
||||
&self
|
||||
.season_sorting
|
||||
.iter()
|
||||
.position(|p| p == &b.first().unwrap().season_id)
|
||||
.unwrap(),
|
||||
)
|
||||
});
|
||||
|
||||
for (_, mut data) in sorted {
|
||||
data.sort_by(|a, b| {
|
||||
self.audio_locales
|
||||
.iter()
|
||||
.position(|p| p == &a.audio)
|
||||
.unwrap_or(usize::MAX)
|
||||
.cmp(
|
||||
&self
|
||||
.audio_locales
|
||||
.iter()
|
||||
.position(|p| p == &b.audio)
|
||||
.unwrap_or(usize::MAX),
|
||||
)
|
||||
});
|
||||
single_format_collection.add_single_formats(data)
|
||||
}
|
||||
|
||||
Ok(single_format_collection)
|
||||
}
|
||||
|
||||
/// Entry point: walks the given [`MediaCollection`] level by level (series →
/// seasons → episodes; movie listings → movies; leaf media directly) until
/// only leaf media is left, then groups and sorts everything via `finish`.
pub(crate) async fn visit(
    mut self,
    media_collection: MediaCollection,
) -> Result<SingleFormatCollection> {
    // worklist of yet-unexpanded media; leaf results accumulate in `result`
    let mut items = vec![media_collection];
    let mut result = vec![];

    while !items.is_empty() {
        let mut new_items: Vec<MediaCollection> = vec![];

        for i in items {
            match i {
                MediaCollection::Series(series) => new_items.extend(
                    self.visit_series(series)
                        .await?
                        .into_iter()
                        .map(|s| s.into())
                        .collect::<Vec<MediaCollection>>(),
                ),
                MediaCollection::Season(season) => new_items.extend(
                    self.visit_season(season)
                        .await?
                        .into_iter()
                        .map(|s| s.into())
                        .collect::<Vec<MediaCollection>>(),
                ),
                MediaCollection::Episode(episode) => {
                    result.push(self.visit_episode(episode).await?)
                }
                MediaCollection::MovieListing(movie_listing) => new_items.extend(
                    self.visit_movie_listing(movie_listing)
                        .await?
                        .into_iter()
                        .map(|m| m.into())
                        .collect::<Vec<MediaCollection>>(),
                ),
                MediaCollection::Movie(movie) => result.push(self.visit_movie(movie).await?),
                MediaCollection::MusicVideo(music_video) => {
                    result.push(self.visit_music_video(music_video).await?)
                }
                MediaCollection::Concert(concert) => {
                    result.push(self.visit_concert(concert).await?)
                }
            }
        }

        items = new_items
    }

    self.finish(result).await
}
|
||||
}
|
||||
|
||||
/// Returns every locale of `searched` that is not contained in `available`,
/// preserving the order of `searched`.
fn missing_locales<'a>(available: &[Locale], searched: &'a [Locale]) -> Vec<&'a Locale> {
    let mut missing = vec![];
    for locale in searched {
        if !available.contains(locale) {
            missing.push(locale);
        }
    }
    missing
}
|
||||
|
||||
/// Remove all duplicates from a [`Vec`], keeping the first occurrence of each
/// element and preserving the original order.
///
/// Runs in O(n²) comparisons since `T` is only required to be [`Eq`] (no
/// hashing or ordering available); fine for the small vectors this is used on.
pub fn real_dedup_vec<T: Clone + Eq>(input: &mut Vec<T>) {
    // `drain` moves the elements out instead of cloning each one (the old
    // implementation cloned the whole vec); pre-allocate for the worst case
    let mut dedup: Vec<T> = Vec::with_capacity(input.len());
    for item in input.drain(..) {
        if !dedup.contains(&item) {
            dedup.push(item);
        }
    }
    *input = dedup
}
|
||||
19
crunchy-cli-core/src/utils/fmt.rs
Normal file
19
crunchy-cli-core/src/utils/fmt.rs
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
use chrono::TimeDelta;
|
||||
|
||||
pub fn format_time_delta(time_delta: &TimeDelta) -> String {
|
||||
let negative = *time_delta < TimeDelta::zero();
|
||||
let time_delta = time_delta.abs();
|
||||
let hours = time_delta.num_hours();
|
||||
let minutes = time_delta.num_minutes() - time_delta.num_hours() * 60;
|
||||
let seconds = time_delta.num_seconds() - time_delta.num_minutes() * 60;
|
||||
let milliseconds = time_delta.num_milliseconds() - time_delta.num_seconds() * 1000;
|
||||
|
||||
format!(
|
||||
"{}{}:{:0>2}:{:0>2}.{:0>3}",
|
||||
if negative { "-" } else { "" },
|
||||
hours,
|
||||
minutes,
|
||||
seconds,
|
||||
milliseconds
|
||||
)
|
||||
}
|
||||
603
crunchy-cli-core/src/utils/format.rs
Normal file
603
crunchy-cli-core/src/utils/format.rs
Normal file
|
|
@ -0,0 +1,603 @@
|
|||
use crate::utils::filter::real_dedup_vec;
|
||||
use crate::utils::locale::LanguageTagging;
|
||||
use crate::utils::log::tab_info;
|
||||
use crate::utils::os::{is_special_file, sanitize};
|
||||
use anyhow::{bail, Result};
|
||||
use chrono::{Datelike, Duration};
|
||||
use crunchyroll_rs::media::{SkipEvents, Stream, StreamData, Subtitle};
|
||||
use crunchyroll_rs::{Concert, Episode, Locale, MediaCollection, Movie, MusicVideo};
|
||||
use log::{debug, info};
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::BTreeMap;
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Metadata of exactly one downloadable item — one episode in one audio
/// locale, a movie, a music video or a concert — plus a handle to its source
/// media for requesting streams.
#[allow(dead_code)]
#[derive(Clone)]
pub struct SingleFormat {
    // groups the same logical episode across its audio versions
    pub identifier: String,

    pub title: String,
    pub description: String,

    pub release_year: u64,
    pub release_month: u64,
    pub release_day: u64,

    pub audio: Locale,
    pub subtitles: Vec<Locale>,

    pub series_id: String,
    pub series_name: String,

    pub season_id: String,
    pub season_title: String,
    pub season_number: u32,

    pub episode_id: String,
    // display string; may differ from `sequence_number` (e.g. "SP1")
    pub episode_number: String,
    pub relative_episode_number: Option<u32>,
    pub sequence_number: f32,
    pub relative_sequence_number: Option<f32>,

    pub duration: Duration,

    // the media this format was created from; used by `stream`/`skip_events`
    source: MediaCollection,
}
|
||||
|
||||
impl SingleFormat {
|
||||
/// Builds a [`SingleFormat`] from an [`Episode`] plus the subtitle locales
/// and (optional) relative numbering computed by the filter.
pub fn new_from_episode(
    episode: Episode,
    subtitles: Vec<Locale>,
    relative_episode_number: Option<u32>,
    relative_sequence_number: Option<f32>,
) -> Self {
    Self {
        identifier: if episode.identifier.is_empty() {
            // crunchyroll sometimes leafs the identifier field empty so we have to build it
            // ourself. it's not 100% save that the identifier which is built here is the same
            // as if crunchyroll would deliver it (because the variables used here may also be
            // wrong delivered by crunchy), but it's the best thing i can do at the moment
            format!(
                "{}|S{}|E{}",
                episode.series_id, episode.season_number, episode.sequence_number
            )
        } else {
            episode.identifier.clone()
        },
        title: episode.title.clone(),
        description: episode.description.clone(),
        release_year: episode.episode_air_date.year() as u64,
        release_month: episode.episode_air_date.month() as u64,
        release_day: episode.episode_air_date.day() as u64,
        audio: episode.audio_locale.clone(),
        subtitles,
        series_id: episode.series_id.clone(),
        series_name: episode.series_title.clone(),
        season_id: episode.season_id.clone(),
        season_title: episode.season_title.to_string(),
        season_number: episode.season_number,
        episode_id: episode.id.clone(),
        // fall back to the numeric sequence number when the display string
        // is not populated
        episode_number: if episode.episode.is_empty() {
            episode.sequence_number.to_string()
        } else {
            episode.episode.clone()
        },
        sequence_number: episode.sequence_number,
        relative_episode_number,
        relative_sequence_number,
        duration: episode.duration,
        source: episode.into(),
    }
}
|
||||
|
||||
/// Builds a [`SingleFormat`] from a [`Movie`]. The movie listing acts as
/// both "series" and "season"; episode numbering is fixed to 1.
pub fn new_from_movie(movie: Movie, subtitles: Vec<Locale>) -> Self {
    Self {
        identifier: movie.id.clone(),
        title: movie.title.clone(),
        description: movie.description.clone(),
        release_year: movie.free_available_date.year() as u64,
        release_month: movie.free_available_date.month() as u64,
        release_day: movie.free_available_date.day() as u64,
        // movies don't expose an audio locale here; assumed Japanese —
        // TODO confirm against the api
        audio: Locale::ja_JP,
        subtitles,
        series_id: movie.movie_listing_id.clone(),
        series_name: movie.movie_listing_title.clone(),
        season_id: movie.movie_listing_id.clone(),
        season_title: movie.movie_listing_title.to_string(),
        season_number: 1,
        episode_id: movie.id.clone(),
        episode_number: "1".to_string(),
        relative_episode_number: Some(1),
        sequence_number: 1.0,
        relative_sequence_number: Some(1.0),
        duration: movie.duration,
        source: movie.into(),
    }
}
|
||||
|
||||
/// Builds a [`SingleFormat`] from a [`MusicVideo`]. The video is its own
/// "series" and "season"; episode numbering is fixed to 1.
pub fn new_from_music_video(music_video: MusicVideo) -> Self {
    Self {
        identifier: music_video.id.clone(),
        title: music_video.title.clone(),
        description: music_video.description.clone(),
        release_year: music_video.original_release.year() as u64,
        release_month: music_video.original_release.month() as u64,
        release_day: music_video.original_release.day() as u64,
        // no audio locale exposed; assumed Japanese — TODO confirm
        audio: Locale::ja_JP,
        subtitles: vec![],
        series_id: music_video.id.clone(),
        series_name: music_video.title.clone(),
        season_id: music_video.id.clone(),
        season_title: music_video.title.clone(),
        season_number: 1,
        episode_id: music_video.id.clone(),
        episode_number: "1".to_string(),
        relative_episode_number: Some(1),
        sequence_number: 1.0,
        relative_sequence_number: Some(1.0),
        duration: music_video.duration,
        source: music_video.into(),
    }
}
|
||||
|
||||
/// Builds a [`SingleFormat`] from a [`Concert`]. The concert is its own
/// "series" and "season"; episode numbering is fixed to 1.
pub fn new_from_concert(concert: Concert) -> Self {
    Self {
        identifier: concert.id.clone(),
        title: concert.title.clone(),
        description: concert.description.clone(),
        release_year: concert.original_release.year() as u64,
        release_month: concert.original_release.month() as u64,
        release_day: concert.original_release.day() as u64,
        // no audio locale exposed; assumed Japanese — TODO confirm
        audio: Locale::ja_JP,
        subtitles: vec![],
        series_id: concert.id.clone(),
        series_name: concert.title.clone(),
        season_id: concert.id.clone(),
        season_title: concert.title.clone(),
        season_number: 1,
        episode_id: concert.id.clone(),
        episode_number: "1".to_string(),
        relative_episode_number: Some(1),
        sequence_number: 1.0,
        relative_sequence_number: Some(1.0),
        duration: concert.duration,
        source: concert.into(),
    }
}
|
||||
|
||||
/// Requests the stream for this format (DRM-free when possible), translating
/// the api's "too many active streams" error into a user friendly message.
pub async fn stream(&self) -> Result<Stream> {
    let stream = match &self.source {
        MediaCollection::Episode(e) => e.stream_maybe_without_drm().await,
        MediaCollection::Movie(m) => m.stream_maybe_without_drm().await,
        MediaCollection::MusicVideo(mv) => mv.stream_maybe_without_drm().await,
        MediaCollection::Concert(c) => c.stream_maybe_without_drm().await,
        // `source` is only ever set to one of the four leaf variants above
        _ => unreachable!(),
    };

    if let Err(crunchyroll_rs::error::Error::Request { message, .. }) = &stream {
        if message.starts_with("TOO_MANY_ACTIVE_STREAMS") {
            bail!("Too many active/parallel streams. Please close at least one stream you're watching and try again")
        }
    };
    Ok(stream?)
}
|
||||
|
||||
pub async fn skip_events(&self) -> Result<Option<SkipEvents>> {
|
||||
match &self.source {
|
||||
MediaCollection::Episode(e) => Ok(Some(e.skip_events().await?)),
|
||||
MediaCollection::Movie(m) => Ok(Some(m.skip_events().await?)),
|
||||
_ => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn source_type(&self) -> String {
|
||||
match &self.source {
|
||||
MediaCollection::Episode(_) => "episode",
|
||||
MediaCollection::Movie(_) => "movie",
|
||||
MediaCollection::MusicVideo(_) => "music video",
|
||||
MediaCollection::Concert(_) => "concert",
|
||||
_ => unreachable!(),
|
||||
}
|
||||
.to_string()
|
||||
}
|
||||
|
||||
pub fn is_episode(&self) -> bool {
|
||||
matches!(self.source, MediaCollection::Episode(_))
|
||||
}
|
||||
|
||||
pub fn is_special(&self) -> bool {
|
||||
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
|
||||
}
|
||||
}
|
||||
|
||||
// Episode ordering key: wraps the float sequence number so it can serve as a
// BTreeMap key. `total_cmp` gives a total order over f32, making `Ord` sound.
struct SingleFormatCollectionEpisodeKey(f32);

impl PartialOrd for SingleFormatCollectionEpisodeKey {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for SingleFormatCollectionEpisodeKey {
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.total_cmp(&other.0)
    }
}
impl PartialEq for SingleFormatCollectionEpisodeKey {
    // NOTE(review): uses IEEE equality while `cmp` uses `total_cmp`; the two
    // disagree for NaN keys — sequence numbers are presumably never NaN,
    // confirm before relying on this with arbitrary floats
    fn eq(&self, other: &Self) -> bool {
        self.0.eq(&other.0)
    }
}
impl Eq for SingleFormatCollectionEpisodeKey {}
|
||||
|
||||
// Season ordering key: (season number, season id). Seasons sort by number;
// different ids with the same number deliberately compare as Greater in
// either direction ("first come first serve"), which is why the ordering is
// non-canonical (see the allow attribute below).
struct SingleFormatCollectionSeasonKey((u32, String));

#[allow(clippy::non_canonical_partial_ord_impl)]
impl PartialOrd for SingleFormatCollectionSeasonKey {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        let mut cmp = self.0 .0.partial_cmp(&other.0 .0);
        if let Some(ordering) = cmp {
            if matches!(ordering, Ordering::Equal) && self.0 .1 != other.0 .1 {
                // first come first serve
                cmp = Some(Ordering::Greater)
            }
        }
        cmp
    }
}
impl Ord for SingleFormatCollectionSeasonKey {
    fn cmp(&self, other: &Self) -> Ordering {
        let mut cmp = self.0 .0.cmp(&other.0 .0);
        if matches!(cmp, Ordering::Equal) && self.0 .1 != other.0 .1 {
            // first come first serve
            cmp = Ordering::Greater
        }
        cmp
    }
}
impl PartialEq for SingleFormatCollectionSeasonKey {
    fn eq(&self, other: &Self) -> bool {
        self.0.eq(&other.0)
    }
}
impl Eq for SingleFormatCollectionSeasonKey {}
|
||||
|
||||
/// All formats to download, grouped season → episode → audio variants.
/// The nested BTreeMaps keep seasons and episodes in their natural order.
pub struct SingleFormatCollection(
    BTreeMap<
        SingleFormatCollectionSeasonKey,
        BTreeMap<SingleFormatCollectionEpisodeKey, Vec<SingleFormat>>,
    >,
);
|
||||
|
||||
impl SingleFormatCollection {
    /// Creates a new, empty collection.
    pub fn new() -> Self {
        Self(BTreeMap::new())
    }

    /// Whether the collection contains no formats at all.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Inserts all formats of one episode (the same episode in different
    /// audios). `single_formats` must not be empty — the first entry
    /// provides the season/episode keys.
    pub fn add_single_formats(&mut self, single_formats: Vec<SingleFormat>) {
        let format = single_formats.first().unwrap();
        self.0
            .entry(SingleFormatCollectionSeasonKey((
                format.season_number,
                format.season_id.clone(),
            )))
            .or_default()
            .insert(
                SingleFormatCollectionEpisodeKey(format.sequence_number),
                single_formats,
            );
    }

    /// Logs a season/episode overview of everything in the collection.
    pub fn full_visual_output(&self) {
        debug!("Series has {} seasons", self.0.len());
        for (season_key, episodes) in &self.0 {
            // representative format for the season heading
            let first_episode = episodes.first_key_value().unwrap().1.first().unwrap();
            info!(
                "{} Season {} ({})",
                first_episode.series_name.clone(),
                season_key.0 .0,
                first_episode.season_title.clone(),
            );
            for (i, (_, formats)) in episodes.iter().enumerate() {
                let format = formats.first().unwrap();
                // debug log level uses a plain format, otherwise a tabbed,
                // numbered list entry is printed
                if log::max_level() == log::Level::Debug {
                    info!(
                        "{} S{:02}E{:0>2}",
                        format.title, format.season_number, format.episode_number
                    )
                } else {
                    tab_info!(
                        "{}. {} » S{:02}E{:0>2}",
                        i + 1,
                        format.title,
                        format.season_number,
                        format.episode_number
                    )
                }
            }
        }
    }
}
|
||||
|
||||
// Consuming iteration: yields one `Vec<SingleFormat>` (all audio variants of
// one episode) at a time, in season/episode order.
impl IntoIterator for SingleFormatCollection {
    type Item = Vec<SingleFormat>;
    type IntoIter = SingleFormatCollectionIterator;

    fn into_iter(self) -> Self::IntoIter {
        SingleFormatCollectionIterator(self)
    }
}
|
||||
|
||||
// Drains the collection season by season; yielded episodes are removed from
// the underlying maps.
pub struct SingleFormatCollectionIterator(SingleFormatCollection);

impl Iterator for SingleFormatCollectionIterator {
    type Item = Vec<SingleFormat>;

    fn next(&mut self) -> Option<Self::Item> {
        // first remaining season (BTreeMap iteration is key-ordered);
        // `None` ends the iteration when the collection is drained
        let (_, episodes) = self.0 .0.iter_mut().next()?;

        let value = episodes.pop_first().unwrap().1;
        // drop the season entry once all of its episodes were yielded
        if episodes.is_empty() {
            self.0 .0.pop_first();
        }
        Some(value)
    }
}
|
||||
|
||||
/// Merged metadata of one episode across all of its downloaded audio/subtitle
/// variants, plus the video properties of the primary stream. Used for
/// output-path templating and container metadata.
#[allow(dead_code)]
#[derive(Clone)]
pub struct Format {
    pub title: String,
    pub description: String,

    // one (audio locale, subtitle locales) pair per downloaded variant
    pub locales: Vec<(Locale, Vec<Locale>)>,

    pub width: u64,
    pub height: u64,
    pub fps: f64,

    pub release_year: u64,
    pub release_month: u64,
    pub release_day: u64,

    pub series_id: String,
    pub series_name: String,

    pub season_id: String,
    pub season_title: String,
    pub season_number: u32,

    pub episode_id: String,
    pub episode_number: String,
    pub relative_episode_number: Option<u32>,
    pub sequence_number: f32,
    pub relative_sequence_number: Option<f32>,
}
|
||||
|
||||
impl Format {
|
||||
/// Merges the per-audio single formats of one episode into one [`Format`].
/// `single_formats` must not be empty — the first entry provides all shared
/// metadata and the video properties.
#[allow(clippy::type_complexity)]
pub fn from_single_formats(
    mut single_formats: Vec<(SingleFormat, StreamData, Vec<(Subtitle, bool)>)>,
) -> Self {
    // (audio locale, subtitle locales) of every variant, in input order
    let locales: Vec<(Locale, Vec<Locale>)> = single_formats
        .iter()
        .map(|(single_format, _, subtitles)| {
            (
                single_format.audio.clone(),
                subtitles
                    .iter()
                    .map(|(s, _)| s.locale.clone())
                    .collect::<Vec<Locale>>(),
            )
        })
        .collect();
    let (first_format, first_stream, _) = single_formats.remove(0);

    Self {
        title: first_format.title,
        description: first_format.description,
        locales,
        // NOTE(review): assumes the stream data always reports resolution
        // and fps — these `unwrap`s panic otherwise; confirm upstream
        width: first_stream.resolution().unwrap().width,
        height: first_stream.resolution().unwrap().height,
        fps: first_stream.fps().unwrap(),
        release_year: first_format.release_year,
        release_month: first_format.release_month,
        release_day: first_format.release_day,
        series_id: first_format.series_id,
        series_name: first_format.series_name,
        season_id: first_format.season_id,
        season_title: first_format.season_title,
        season_number: first_format.season_number,
        episode_id: first_format.episode_id,
        episode_number: first_format.episode_number,
        relative_episode_number: first_format.relative_episode_number,
        sequence_number: first_format.sequence_number,
        relative_sequence_number: first_format.relative_sequence_number,
    }
}
|
||||
|
||||
/// Formats the given string if it has specific pattern in it. It also sanitizes the filename.
|
||||
pub fn format_path(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
universal: bool,
|
||||
language_tagging: Option<&LanguageTagging>,
|
||||
) -> PathBuf {
|
||||
let path = path
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
.replace("{title}", &sanitize(&self.title, true, universal))
|
||||
.replace(
|
||||
"{audio}",
|
||||
&sanitize(
|
||||
self.locales
|
||||
.iter()
|
||||
.map(|(a, _)| language_tagging.map_or(a.to_string(), |t| t.for_locale(a)))
|
||||
.collect::<Vec<String>>()
|
||||
.join(
|
||||
&env::var("CRUNCHY_CLI_FORMAT_DELIMITER")
|
||||
.map_or("_".to_string(), |e| e),
|
||||
),
|
||||
true,
|
||||
universal,
|
||||
),
|
||||
)
|
||||
.replace(
|
||||
"{width}",
|
||||
&sanitize(self.width.to_string(), true, universal),
|
||||
)
|
||||
.replace(
|
||||
"{height}",
|
||||
&sanitize(self.height.to_string(), true, universal),
|
||||
)
|
||||
.replace("{series_id}", &sanitize(&self.series_id, true, universal))
|
||||
.replace(
|
||||
"{series_name}",
|
||||
&sanitize(&self.series_name, true, universal),
|
||||
)
|
||||
.replace("{season_id}", &sanitize(&self.season_id, true, universal))
|
||||
.replace(
|
||||
"{season_name}",
|
||||
&sanitize(&self.season_title, true, universal),
|
||||
)
|
||||
.replace(
|
||||
"{season_number}",
|
||||
&format!(
|
||||
"{:0>2}",
|
||||
sanitize(self.season_number.to_string(), true, universal)
|
||||
),
|
||||
)
|
||||
.replace("{episode_id}", &sanitize(&self.episode_id, true, universal))
|
||||
.replace(
|
||||
"{episode_number}",
|
||||
&format!("{:0>2}", sanitize(&self.episode_number, true, universal)),
|
||||
)
|
||||
.replace(
|
||||
"{relative_episode_number}",
|
||||
&format!(
|
||||
"{:0>2}",
|
||||
sanitize(
|
||||
self.relative_episode_number.unwrap_or_default().to_string(),
|
||||
true,
|
||||
universal,
|
||||
)
|
||||
),
|
||||
)
|
||||
.replace(
|
||||
"{sequence_number}",
|
||||
&format!(
|
||||
"{:0>2}",
|
||||
sanitize(self.sequence_number.to_string(), true, universal)
|
||||
),
|
||||
)
|
||||
.replace(
|
||||
"{relative_sequence_number}",
|
||||
&format!(
|
||||
"{:0>2}",
|
||||
sanitize(
|
||||
self.relative_sequence_number
|
||||
.unwrap_or_default()
|
||||
.to_string(),
|
||||
true,
|
||||
universal,
|
||||
)
|
||||
),
|
||||
)
|
||||
.replace(
|
||||
"{release_year}",
|
||||
&sanitize(self.release_year.to_string(), true, universal),
|
||||
)
|
||||
.replace(
|
||||
"{release_month}",
|
||||
&format!(
|
||||
"{:0>2}",
|
||||
sanitize(self.release_month.to_string(), true, universal)
|
||||
),
|
||||
)
|
||||
.replace(
|
||||
"{release_day}",
|
||||
&format!(
|
||||
"{:0>2}",
|
||||
sanitize(self.release_day.to_string(), true, universal)
|
||||
),
|
||||
);
|
||||
|
||||
let mut path = PathBuf::from(path);
|
||||
|
||||
// make sure that every path section has a maximum of 255 characters
|
||||
if path.file_name().unwrap_or_default().to_string_lossy().len() > 255 {
|
||||
let name = path
|
||||
.file_stem()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
let ext = path
|
||||
.extension()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
if ext != name {
|
||||
path.set_file_name(format!("{}.{}", &name[..(255 - ext.len() - 1)], ext))
|
||||
}
|
||||
}
|
||||
path.iter()
|
||||
.map(|s| {
|
||||
if s.len() > 255 {
|
||||
s.to_string_lossy()[..255].to_string()
|
||||
} else {
|
||||
s.to_string_lossy().to_string()
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn visual_output(&self, dst: &Path) {
|
||||
info!(
|
||||
"Downloading {} to {}",
|
||||
self.title,
|
||||
if is_special_file(dst) || dst.to_str().unwrap() == "-" {
|
||||
dst.to_string_lossy().to_string()
|
||||
} else {
|
||||
format!("'{}'", dst.to_str().unwrap())
|
||||
}
|
||||
);
|
||||
tab_info!(
|
||||
"Episode: S{:02}E{:0>2}",
|
||||
self.season_number,
|
||||
self.episode_number
|
||||
);
|
||||
tab_info!(
|
||||
"Audio: {}",
|
||||
self.locales
|
||||
.iter()
|
||||
.map(|(a, _)| a.to_string())
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
);
|
||||
let mut subtitles: Vec<Locale> = self.locales.iter().flat_map(|(_, s)| s.clone()).collect();
|
||||
real_dedup_vec(&mut subtitles);
|
||||
tab_info!(
|
||||
"Subtitles: {}",
|
||||
subtitles
|
||||
.into_iter()
|
||||
.map(|l| l.to_string())
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
);
|
||||
tab_info!("Resolution: {}x{}", self.height, self.width);
|
||||
tab_info!("FPS: {:.2}", self.fps)
|
||||
}
|
||||
|
||||
pub fn is_special(&self) -> bool {
|
||||
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
|
||||
}
|
||||
|
||||
pub fn has_relative_fmt<S: AsRef<str>>(s: S) -> bool {
|
||||
return s.as_ref().contains("{relative_episode_number}")
|
||||
|| s.as_ref().contains("{relative_sequence_number}");
|
||||
}
|
||||
}
|
||||
73
crunchy-cli-core/src/utils/interactive_select.rs
Normal file
73
crunchy-cli-core/src/utils/interactive_select.rs
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
use crate::utils::log::progress_pause;
|
||||
use crunchyroll_rs::Season;
|
||||
use dialoguer::console::Term;
|
||||
use dialoguer::MultiSelect;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
pub fn get_duplicated_seasons(seasons: &Vec<Season>) -> Vec<u32> {
|
||||
let mut season_number_counter = BTreeMap::<u32, u32>::new();
|
||||
for season in seasons {
|
||||
season_number_counter
|
||||
.entry(season.season_number)
|
||||
.and_modify(|c| *c += 1)
|
||||
.or_default();
|
||||
}
|
||||
season_number_counter
|
||||
.into_iter()
|
||||
.filter_map(|(k, v)| if v > 0 { Some(k) } else { None })
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn check_for_duplicated_seasons(seasons: &mut Vec<Season>) {
|
||||
let mut as_map = BTreeMap::new();
|
||||
for season in seasons.iter() {
|
||||
as_map
|
||||
.entry(season.season_number)
|
||||
.or_insert(vec![])
|
||||
.push(season)
|
||||
}
|
||||
|
||||
let duplicates: Vec<&Season> = as_map
|
||||
.into_values()
|
||||
.filter(|s| s.len() > 1)
|
||||
.flatten()
|
||||
.collect();
|
||||
progress_pause!();
|
||||
let _ = Term::stdout().clear_line();
|
||||
let keep = select(
|
||||
"Duplicated seasons were found. Select the one you want to download (space to select/deselect; enter to continue)",
|
||||
duplicates
|
||||
.iter()
|
||||
.map(|s| format!("Season {} ({})", s.season_number, s.title))
|
||||
.collect(),
|
||||
);
|
||||
progress_pause!();
|
||||
|
||||
let mut remove_ids = vec![];
|
||||
for (i, duplicate) in duplicates.into_iter().enumerate() {
|
||||
if !keep.contains(&i) {
|
||||
remove_ids.push(duplicate.id.clone())
|
||||
}
|
||||
}
|
||||
|
||||
seasons.retain(|s| !remove_ids.contains(&s.id));
|
||||
}
|
||||
|
||||
pub fn select(prompt: &str, input: Vec<String>) -> Vec<usize> {
|
||||
if input.is_empty() {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let def: Vec<bool> = (0..input.len()).map(|_| true).collect();
|
||||
|
||||
let selection = MultiSelect::new()
|
||||
.with_prompt(prompt)
|
||||
.items(&input[..])
|
||||
.defaults(&def[..])
|
||||
.clear(false)
|
||||
.report(false)
|
||||
.interact_on(&Term::stdout())
|
||||
.unwrap_or_default();
|
||||
|
||||
selection
|
||||
}
|
||||
148
crunchy-cli-core/src/utils/locale.rs
Normal file
148
crunchy-cli-core/src/utils/locale.rs
Normal file
|
|
@ -0,0 +1,148 @@
|
|||
use crunchyroll_rs::Locale;
|
||||
use log::warn;
|
||||
|
||||
/// Controls how locales are rendered as strings (see the `for_locale` /
/// `convert_locales` methods on this type).
#[derive(Clone, Debug)]
#[allow(clippy::upper_case_acronyms)]
pub enum LanguageTagging {
    // render the first locale of the matching IETF group (e.g. `en-US`)
    Default,
    // render the two-letter IETF primary tag (e.g. `en`)
    IETF,
}
|
||||
|
||||
impl LanguageTagging {
|
||||
pub fn parse(s: &str) -> Result<Self, String> {
|
||||
Ok(match s.to_lowercase().as_str() {
|
||||
"default" => Self::Default,
|
||||
"ietf" => Self::IETF,
|
||||
_ => return Err(format!("'{}' is not a valid language tagging", s)),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn convert_locales(&self, locales: &[Locale]) -> Vec<String> {
|
||||
let ietf_language_codes = ietf_language_codes();
|
||||
let mut converted = vec![];
|
||||
|
||||
match &self {
|
||||
LanguageTagging::Default => {
|
||||
for locale in locales {
|
||||
let Some((_, available)) =
|
||||
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
|
||||
else {
|
||||
// if no matching IETF language code was found, just pass it as it is
|
||||
converted.push(locale.to_string());
|
||||
continue;
|
||||
};
|
||||
converted.push(available.first().unwrap().to_string())
|
||||
}
|
||||
}
|
||||
LanguageTagging::IETF => {
|
||||
for locale in locales {
|
||||
let Some((tag, _)) =
|
||||
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
|
||||
else {
|
||||
// if no matching IETF language code was found, just pass it as it is
|
||||
converted.push(locale.to_string());
|
||||
continue;
|
||||
};
|
||||
converted.push(tag.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
converted
|
||||
}
|
||||
|
||||
pub fn for_locale(&self, locale: &Locale) -> String {
|
||||
match &self {
|
||||
LanguageTagging::Default => ietf_language_codes()
|
||||
.iter()
|
||||
.find(|(_, l)| l.contains(locale))
|
||||
.map_or(locale.to_string(), |(_, l)| l[0].to_string()),
|
||||
LanguageTagging::IETF => ietf_language_codes()
|
||||
.iter()
|
||||
.find(|(_, l)| l.contains(locale))
|
||||
.map_or(locale.to_string(), |(tag, _)| tag.to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_locales(locales: &[Locale]) -> Vec<Locale> {
|
||||
let ietf_language_codes = ietf_language_codes();
|
||||
let all_locales = Locale::all();
|
||||
|
||||
let mut resolved = vec![];
|
||||
for locale in locales {
|
||||
if all_locales.contains(locale) {
|
||||
resolved.push(locale.clone())
|
||||
} else if let Some((_, resolved_locales)) = ietf_language_codes
|
||||
.iter()
|
||||
.find(|(tag, _)| tag == &locale.to_string().as_str())
|
||||
{
|
||||
let (first, alternatives) = resolved_locales.split_first().unwrap();
|
||||
|
||||
resolved.push(first.clone());
|
||||
// ignoring `Locale::en_IN` because I think the majority of users which want english
|
||||
// audio / subs want the "actual" english version and not the hindi accent dub
|
||||
if !alternatives.is_empty() && resolved_locales.first().unwrap() != &Locale::en_IN {
|
||||
warn!("Resolving locale '{}' to '{}', but there are some alternatives: {}. If you an alternative instead, please write it completely out instead of '{}'", locale, first, alternatives.iter().map(|l| format!("'{l}'")).collect::<Vec<String>>().join(", "), locale)
|
||||
}
|
||||
} else {
|
||||
resolved.push(locale.clone());
|
||||
warn!("Unknown locale '{}'", locale)
|
||||
}
|
||||
}
|
||||
|
||||
resolved
|
||||
}
|
||||
|
||||
/// Maps IETF primary language subtags to the Crunchyroll locales they cover.
/// The first locale of each group is the canonical choice when resolving a
/// bare tag (see `resolve_locales` / `LanguageTagging`).
fn ietf_language_codes<'a>() -> Vec<(&'a str, Vec<Locale>)> {
    vec![
        ("ar", vec![Locale::ar_ME, Locale::ar_SA]),
        ("ca", vec![Locale::ca_ES]),
        ("de", vec![Locale::de_DE]),
        // NOTE(review): `hi_IN` listed under "en" looks suspicious — the
        // comment in `resolve_locales` about `Locale::en_IN` (the "hindi
        // accent dub") suggests this entry may be intended as `Locale::en_IN`;
        // verify before changing
        ("en", vec![Locale::en_US, Locale::hi_IN]),
        ("es", vec![Locale::es_ES, Locale::es_419, Locale::es_LA]),
        ("fr", vec![Locale::fr_FR]),
        ("hi", vec![Locale::hi_IN]),
        ("id", vec![Locale::id_ID]),
        ("it", vec![Locale::it_IT]),
        ("ja", vec![Locale::ja_JP]),
        ("ko", vec![Locale::ko_KR]),
        ("ms", vec![Locale::ms_MY]),
        ("pl", vec![Locale::pl_PL]),
        ("pt", vec![Locale::pt_PT, Locale::pt_BR]),
        ("ru", vec![Locale::ru_RU]),
        ("ta", vec![Locale::ta_IN]),
        ("te", vec![Locale::te_IN]),
        ("th", vec![Locale::th_TH]),
        ("tr", vec![Locale::tr_TR]),
        ("vi", vec![Locale::vi_VN]),
        ("zh", vec![Locale::zh_CN, Locale::zh_HK, Locale::zh_TW]),
    ]
}
|
||||
|
||||
/// Return the locale of the system.
|
||||
pub fn system_locale() -> Locale {
|
||||
if let Some(system_locale) = sys_locale::get_locale() {
|
||||
let locale = Locale::from(system_locale);
|
||||
if let Locale::Custom(_) = locale {
|
||||
Locale::en_US
|
||||
} else {
|
||||
locale
|
||||
}
|
||||
} else {
|
||||
Locale::en_US
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if [`Locale::Custom("all")`] is in the provided locale list and return [`Locale::all`] if
|
||||
/// so. If not, just return the provided locale list.
|
||||
pub fn all_locale_in_locales(locales: Vec<Locale>) -> Vec<Locale> {
|
||||
if locales
|
||||
.iter()
|
||||
.any(|l| l.to_string().to_lowercase().trim() == "all")
|
||||
{
|
||||
Locale::all()
|
||||
} else {
|
||||
locales
|
||||
}
|
||||
}
|
||||
186
crunchy-cli-core/src/utils/log.rs
Normal file
186
crunchy-cli-core/src/utils/log.rs
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};
|
||||
use log::{
|
||||
info, set_boxed_logger, set_max_level, Level, LevelFilter, Log, Metadata, Record,
|
||||
SetLoggerError,
|
||||
};
|
||||
use std::io::{stdout, Write};
|
||||
use std::sync::Mutex;
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Guard returned by the `progress!` macro. Dropping it (or calling
/// `ProgressHandler::stop`) emits the `progress_end` log event which finishes
/// the running progress spinner.
pub struct ProgressHandler {
    // set once `stop` ran so `Drop` does not emit a second end event
    pub(crate) stopped: bool,
}
|
||||
|
||||
impl Drop for ProgressHandler {
|
||||
fn drop(&mut self) {
|
||||
if !self.stopped {
|
||||
info!(target: "progress_end", "")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ProgressHandler {
    /// Finishes the running progress spinner, showing `msg` as final message.
    pub(crate) fn stop<S: AsRef<str>>(mut self, msg: S) {
        // mark as stopped so the `Drop` impl does not emit a second end event
        self.stopped = true;
        info!(target: "progress_end", "{}", msg.as_ref())
    }
}
|
||||
|
||||
/// Starts a progress spinner with the given format message and returns a
/// `ProgressHandler` guard which ends the spinner when dropped or stopped.
/// The `progress` log target is intercepted by `CliLogger`.
macro_rules! progress {
    ($($arg:tt)+) => {
        {
            log::info!(target: "progress", $($arg)+);
            $crate::utils::log::ProgressHandler{stopped: false}
        }
    }
}
pub(crate) use progress;

/// Toggles (pause/resume) drawing of the currently active progress spinner
/// via the `progress_pause` log target.
macro_rules! progress_pause {
    () => {
        {
            log::info!(target: "progress_pause", "")
        }
    }
}
pub(crate) use progress_pause;

/// Logs an info message indented by a tab. At debug level the indentation is
/// omitted (the extended log format has its own layout).
macro_rules! tab_info {
    ($($arg:tt)+) => {
        if log::max_level() == log::LevelFilter::Debug {
            info!($($arg)+)
        } else {
            info!("\t{}", format!($($arg)+))
        }
    }
}
pub(crate) use tab_info;
|
||||
|
||||
/// `log::Log` implementation driving both plain cli output and the progress
/// spinner (see the `Log` impl below for the target-based dispatch).
pub struct CliLogger {
    // maximum level that gets logged
    level: LevelFilter,
    // the currently running progress spinner, if any
    progress: Mutex<Option<ProgressBar>>,
}
|
||||
|
||||
impl Log for CliLogger {
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= self.level
    }

    /// Dispatches records by their target: the `progress*` pseudo-targets
    /// drive the spinner, everything else is printed normally. Records from
    /// foreign crates (target not starting with `crunchy_cli`) are dropped.
    fn log(&self, record: &Record) {
        if !self.enabled(record.metadata())
            || (record.target() != "progress"
                && record.target() != "progress_pause"
                && record.target() != "progress_end"
                && !record.target().starts_with("crunchy_cli"))
        {
            return;
        }

        // at debug level everything goes through the extended (timestamped)
        // format and the spinner machinery is bypassed entirely
        if self.level >= LevelFilter::Debug {
            self.extended(record);
            return;
        }

        match record.target() {
            // start a spinner (or print into a running one)
            "progress" => self.progress(record, false),
            // toggle spinner drawing between stdout and hidden
            "progress_pause" => {
                let progress = self.progress.lock().unwrap();
                if let Some(p) = &*progress {
                    p.set_draw_target(if p.is_hidden() {
                        ProgressDrawTarget::stdout()
                    } else {
                        ProgressDrawTarget::hidden()
                    })
                }
            }
            // finish the running spinner
            "progress_end" => self.progress(record, true),
            _ => {
                if self.progress.lock().unwrap().is_some() {
                    // a spinner is active: print the message through it
                    self.progress(record, false)
                } else if record.level() > Level::Warn {
                    // info and below go to stdout
                    self.normal(record)
                } else {
                    // warnings and errors go to stderr
                    self.error(record)
                }
            }
        }
    }

    fn flush(&self) {
        let _ = stdout().flush();
    }
}
|
||||
|
||||
impl CliLogger {
    /// Creates a logger for the given maximum level with no active spinner.
    pub fn new(level: LevelFilter) -> Self {
        Self {
            level,
            progress: Mutex::new(None),
        }
    }

    /// Installs a [`CliLogger`] as the global logger.
    pub fn init(level: LevelFilter) -> Result<(), SetLoggerError> {
        set_max_level(level);
        set_boxed_logger(Box::new(CliLogger::new(level)))
    }

    /// Debug-level output format: timestamp, level, target and thread id.
    fn extended(&self, record: &Record) {
        println!(
            "[{}] {} {} ({}) {}",
            chrono::Utc::now().format("%Y-%m-%d %H:%M:%S"),
            record.level(),
            // replace the 'progress' prefix if this function is invoked via 'progress!'
            record
                .target()
                .replacen("crunchy_cli_core", "crunchy_cli", 1)
                .replacen("progress_end", "crunchy_cli", 1)
                .replacen("progress", "crunchy_cli", 1),
            // strip the `ThreadId(..)` wrapper, keeping only the number
            format!("{:?}", thread::current().id())
                .replace("ThreadId(", "")
                .replace(')', ""),
            record.args()
        )
    }

    /// Plain stdout output for info-level messages.
    fn normal(&self, record: &Record) {
        println!(":: {}", record.args())
    }

    /// Plain stderr output for warnings and errors.
    fn error(&self, record: &Record) {
        eprintln!(":: {}", record.args())
    }

    /// Spinner state machine: finishes the running spinner when `stop` is
    /// set, prints through a running spinner, or starts a new one.
    fn progress(&self, record: &Record, stop: bool) {
        let mut progress = self.progress.lock().unwrap();

        let msg = format!("{}", record.args());
        if stop && progress.is_some() {
            // finish (and drop) the running spinner, optionally with a final message
            if msg.is_empty() {
                progress.take().unwrap().finish()
            } else {
                progress.take().unwrap().finish_with_message(msg)
            }
        } else if let Some(p) = &*progress {
            // a spinner is running: print the message above it
            p.println(format!(":: → {}", msg))
        } else {
            #[cfg(not(windows))]
            let finish_str = "✔";
            #[cfg(windows)]
            // windows does not support all unicode characters by default in their consoles, so
            // we're using this (square root) symbol instead. microsoft.
            let finish_str = "√";

            // start a new spinner; the last tick string is shown when finished
            let pb = ProgressBar::new_spinner();
            pb.set_style(
                ProgressStyle::with_template(":: {spinner} {msg}")
                    .unwrap()
                    .tick_strings(&["—", "\\", "|", "/", finish_str]),
            );
            pb.set_draw_target(ProgressDrawTarget::stdout());
            pb.enable_steady_tick(Duration::from_millis(200));
            pb.set_message(msg);
            *progress = Some(pb)
        }
    }
}
|
||||
15
crunchy-cli-core/src/utils/mod.rs
Normal file
15
crunchy-cli-core/src/utils/mod.rs
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
//! Shared utility modules.

pub mod clap;
pub mod context;
pub mod download;
pub mod ffmpeg;
pub mod filter;
pub mod fmt;
pub mod format;
pub mod interactive_select;
pub mod locale;
pub mod log;
pub mod os;
pub mod parse;
pub mod rate_limit;
pub mod sync;
pub mod video;
|
||||
225
crunchy-cli-core/src/utils/os.rs
Normal file
225
crunchy-cli-core/src/utils/os.rs
Normal file
|
|
@ -0,0 +1,225 @@
|
|||
use log::debug;
|
||||
use regex::{Regex, RegexBuilder};
|
||||
use std::borrow::Cow;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::pin::Pin;
|
||||
use std::process::{Command, Stdio};
|
||||
use std::task::{Context, Poll};
|
||||
use std::{env, fs, io};
|
||||
use tempfile::{Builder, NamedTempFile, TempPath};
|
||||
use tokio::io::{AsyncRead, ReadBuf};
|
||||
|
||||
pub fn has_ffmpeg() -> bool {
|
||||
if let Err(e) = Command::new("ffmpeg").stderr(Stdio::null()).spawn() {
|
||||
if ErrorKind::NotFound != e.kind() {
|
||||
debug!(
|
||||
"unknown error occurred while checking if ffmpeg exists: {}",
|
||||
e.kind()
|
||||
)
|
||||
}
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the temp directory either by the specified `CRUNCHY_CLI_TEMP_DIR` env
/// variable or the dir provided by the os.
pub fn temp_directory() -> PathBuf {
    match env::var("CRUNCHY_CLI_TEMP_DIR") {
        Ok(dir) => PathBuf::from(dir),
        Err(_) => env::temp_dir(),
    }
}
|
||||
|
||||
/// Any tempfile should be created with this function. The prefix and directory of every file
|
||||
/// created with this function stays the same which is helpful to query all existing tempfiles and
|
||||
/// e.g. remove them in a case of ctrl-c. Having one function also good to prevent mistakes like
|
||||
/// setting the wrong prefix if done manually.
|
||||
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
|
||||
let tempfile = Builder::default()
|
||||
.prefix(".crunchy-cli_")
|
||||
.suffix(suffix.as_ref())
|
||||
.tempfile_in(temp_directory())?;
|
||||
debug!(
|
||||
"Created temporary file: {}",
|
||||
tempfile.path().to_string_lossy()
|
||||
);
|
||||
Ok(tempfile)
|
||||
}
|
||||
|
||||
pub fn cache_dir<S: AsRef<str>>(name: S) -> io::Result<PathBuf> {
|
||||
let cache_dir = temp_directory().join(format!(".crunchy-cli_{}_cache", name.as_ref()));
|
||||
fs::create_dir_all(&cache_dir)?;
|
||||
Ok(cache_dir)
|
||||
}
|
||||
|
||||
/// A named pipe living at a temporary path. On unix this is a real fifo; on
/// windows a regular file is read in a "tail"-like fashion instead (see the
/// `AsyncRead` impl).
pub struct TempNamedPipe {
    // keeps the temporary path alive for the lifetime of the pipe
    path: TempPath,

    #[cfg(not(target_os = "windows"))]
    reader: tokio::net::unix::pipe::Receiver,
    #[cfg(target_os = "windows")]
    file: tokio::fs::File,
}
|
||||
|
||||
impl TempNamedPipe {
    /// Filesystem path of the pipe.
    pub fn path(&self) -> &Path {
        &self.path
    }
}
|
||||
|
||||
impl AsyncRead for TempNamedPipe {
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<io::Result<()>> {
        // unix: simply forward to the fifo receiver
        #[cfg(not(target_os = "windows"))]
        return Pin::new(&mut self.reader).poll_read(cx, buf);
        // very very dirty implementation of a 'tail' like behavior
        #[cfg(target_os = "windows")]
        {
            // read into a scratch buffer first so nothing is committed to
            // `buf` when zero bytes were read
            let mut tmp_bytes = vec![0; buf.remaining()];
            let mut tmp_buf = ReadBuf::new(tmp_bytes.as_mut_slice());

            loop {
                return match Pin::new(&mut self.file).poll_read(cx, &mut tmp_buf) {
                    Poll::Ready(r) => {
                        if r.is_ok() {
                            if !tmp_buf.filled().is_empty() {
                                buf.put_slice(tmp_buf.filled())
                            } else {
                                // sleep to not loop insanely fast and consume unnecessary system resources
                                // NOTE(review): this is a *blocking* sleep inside
                                // `poll_read`, stalling the executor thread for
                                // 50ms per empty read — consider a timer-based
                                // wakeup instead
                                std::thread::sleep(std::time::Duration::from_millis(50));
                                continue;
                            }
                        }
                        Poll::Ready(r)
                    }
                    Poll::Pending => Poll::Pending,
                };
            }
        }
    }
}
|
||||
|
||||
impl Drop for TempNamedPipe {
    fn drop(&mut self) {
        // the fifo was created manually (see `temp_named_pipe`), so it is also
        // unlinked manually here; on windows the file is presumably cleaned up
        // by the `TempPath` member itself
        #[cfg(not(target_os = "windows"))]
        let _ = nix::unistd::unlink(self.path.to_string_lossy().to_string().as_str());
    }
}
|
||||
|
||||
/// Creates a [`TempNamedPipe`]: a real fifo on unix, a plain temp file that is
/// polled tail-style on windows.
pub fn temp_named_pipe() -> io::Result<TempNamedPipe> {
    // reserve a unique path via the regular tempfile machinery
    let tmp = tempfile("")?;

    #[cfg(not(target_os = "windows"))]
    {
        let path = tmp.into_temp_path();
        // remove the regular file so a fifo can be created at the same path
        let _ = fs::remove_file(&path);

        nix::unistd::mkfifo(
            path.to_string_lossy().to_string().as_str(),
            nix::sys::stat::Mode::S_IRWXU,
        )?;

        Ok(TempNamedPipe {
            reader: tokio::net::unix::pipe::OpenOptions::new().open_receiver(&path)?,
            path,
        })
    }
    #[cfg(target_os = "windows")]
    {
        // keep the regular file and read it asynchronously
        let (file, path) = tmp.into_parts();

        Ok(TempNamedPipe {
            file: tokio::fs::File::from_std(file),
            path,
        })
    }
}
|
||||
|
||||
/// Check if the given path exists and rename it until the new (renamed) file does not exist.
/// Returns the (possibly renamed) path and whether a rename was necessary.
pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
    // do not rename it if it exists but is a special file
    if is_special_file(&path) {
        return (path, false);
    }

    let mut i = 0;
    while path.exists() {
        i += 1;

        let mut ext = path.extension().unwrap_or_default().to_str().unwrap();
        let mut filename = path.file_stem().unwrap_or_default().to_str().unwrap();

        // if the extension is empty, the filename without extension is probably empty
        // (e.g. `.mp4`). in this case Rust assumes that `.mp4` is the file stem rather than the
        // extension. if this is the case, set the extension to the file stem and make the file stem
        // empty
        // NOTE(review): this also triggers for plain extensionless names like
        // `foo` (extension None, stem "foo"), turning them into ` (1).foo` —
        // confirm whether that is intended
        if ext.is_empty() {
            ext = filename;
            filename = "";
        }

        // strip the counter added by the previous iteration so counters do not
        // pile up like `x (1) (2)`
        if filename.ends_with(&format!(" ({})", i - 1)) {
            filename = filename.strip_suffix(&format!(" ({})", i - 1)).unwrap();
        }

        path.set_file_name(format!("{} ({}).{}", filename, i, ext))
    }
    // `i != 0` reports whether any rename happened
    (path, i != 0)
}
|
||||
|
||||
/// Check if the given path is a special file. On Linux this is probably a pipe and on Windows
/// ¯\_(ツ)_/¯
pub fn is_special_file<P: AsRef<Path>>(path: P) -> bool {
    let p = path.as_ref();
    p.exists() && !p.is_file() && !p.is_dir()
}
|
||||
|
||||
lazy_static::lazy_static! {
    // control characters and the c1 block, not printable on windows
    static ref WINDOWS_NON_PRINTABLE_RE: Regex = Regex::new(r"[\x00-\x1f\x80-\x9f]").unwrap();
    // characters windows forbids in filenames (path separators handled separately)
    static ref WINDOWS_ILLEGAL_RE: Regex = Regex::new(r#"[<>:"|?*]"#).unwrap();
    // reserved windows device names like `CON` or `COM1`, with or without extension.
    // NOTE(review): `(?i)` already makes the pattern case insensitive, the
    // additional `case_insensitive(true)` is redundant but harmless
    static ref WINDOWS_RESERVED_RE: Regex = RegexBuilder::new(r"(?i)^(con|prn|aux|nul|com[0-9]|lpt[0-9])(\..*)?$")
        .case_insensitive(true)
        .build()
        .unwrap();
    // windows strips trailing dots and spaces from filenames
    static ref WINDOWS_TRAILING_RE: Regex = Regex::new(r"[\. ]+$").unwrap();

    // the only forbidden byte in linux filenames is NUL
    static ref LINUX_NON_PRINTABLE: Regex = Regex::new(r"[\x00]").unwrap();

    // names consisting only of dots (`.`, `..`, ...) are reserved
    static ref RESERVED_RE: Regex = Regex::new(r"^\.+$").unwrap();
}
|
||||
|
||||
/// Sanitizes a filename with the option to include/exclude the path separator from sanitizing.
|
||||
pub fn sanitize<S: AsRef<str>>(path: S, include_path_separator: bool, universal: bool) -> String {
|
||||
let path = Cow::from(path.as_ref().trim());
|
||||
|
||||
let path = RESERVED_RE.replace(&path, "");
|
||||
|
||||
let collect = |name: String| {
|
||||
if name.len() > 255 {
|
||||
name[..255].to_string()
|
||||
} else {
|
||||
name
|
||||
}
|
||||
};
|
||||
|
||||
if universal || cfg!(windows) {
|
||||
let path = WINDOWS_NON_PRINTABLE_RE.replace_all(&path, "");
|
||||
let path = WINDOWS_ILLEGAL_RE.replace_all(&path, "");
|
||||
let path = WINDOWS_RESERVED_RE.replace_all(&path, "");
|
||||
let path = WINDOWS_TRAILING_RE.replace(&path, "");
|
||||
let mut path = path.to_string();
|
||||
if include_path_separator {
|
||||
path = path.replace(['\\', '/'], "");
|
||||
}
|
||||
collect(path)
|
||||
} else {
|
||||
let path = LINUX_NON_PRINTABLE.replace_all(&path, "");
|
||||
let mut path = path.to_string();
|
||||
if include_path_separator {
|
||||
path = path.replace('/', "");
|
||||
}
|
||||
collect(path)
|
||||
}
|
||||
}
|
||||
207
crunchy-cli-core/src/utils/parse.rs
Normal file
207
crunchy-cli-core/src/utils/parse.rs
Normal file
|
|
@ -0,0 +1,207 @@
|
|||
use anyhow::{anyhow, bail, Result};
|
||||
use crunchyroll_rs::media::Resolution;
|
||||
use crunchyroll_rs::{Crunchyroll, MediaCollection, UrlType};
|
||||
use log::debug;
|
||||
use regex::Regex;
|
||||
|
||||
/// Define a find, based on season and episode number to find episodes / movies.
/// If a struct instance equals the [`Default::default()`] it's considered that no find is applied.
/// If `from_*` is [`None`] the bound is treated as the minimum possible value
/// ([`f32::MIN`] for episodes, [`u32::MIN`] for seasons).
/// If `to_*` is [`None`] the bound is treated as the maximum possible value
/// ([`f32::MAX`] for episodes, [`u32::MAX`] for seasons).
#[derive(Debug, Default)]
pub struct InnerUrlFilter {
    // episode numbers are f32 because specials can have fractional numbers
    from_episode: Option<f32>,
    to_episode: Option<f32>,
    from_season: Option<u32>,
    to_season: Option<u32>,
}
|
||||
|
||||
/// Season/episode filter parsed from the `[...]` suffix of a url. An episode
/// passes when it matches at least one of the inner filters.
#[derive(Debug)]
pub struct UrlFilter {
    inner: Vec<InnerUrlFilter>,
}
|
||||
|
||||
impl Default for UrlFilter {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
inner: vec![InnerUrlFilter::default()],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl UrlFilter {
|
||||
pub fn is_season_valid(&self, season: u32) -> bool {
|
||||
self.inner.iter().any(|f| {
|
||||
let from_season = f.from_season.unwrap_or(u32::MIN);
|
||||
let to_season = f.to_season.unwrap_or(u32::MAX);
|
||||
|
||||
season >= from_season && season <= to_season
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_episode_valid(&self, episode: f32, season: u32) -> bool {
|
||||
self.inner.iter().any(|f| {
|
||||
let from_episode = f.from_episode.unwrap_or(f32::MIN);
|
||||
let to_episode = f.to_episode.unwrap_or(f32::MAX);
|
||||
let from_season = f.from_season.unwrap_or(u32::MIN);
|
||||
let to_season = f.to_season.unwrap_or(u32::MAX);
|
||||
|
||||
if season < from_season || season > to_season {
|
||||
false
|
||||
} else if season == from_season || (f.from_season.is_none() && f.to_season.is_none()) {
|
||||
episode >= from_episode && episode <= to_episode
|
||||
} else {
|
||||
true
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse a url and return all [`crunchyroll_rs::Media<crunchyroll_rs::Episode>`] &
/// [`crunchyroll_rs::Media<crunchyroll_rs::Movie>`] which could be related to it.
///
/// The `with_filter` arguments says if filtering should be enabled for the url. Filtering is a
/// specific pattern at the end of the url which declares which parts of the url content should be
/// returned / filtered (out). _This only works if the url points to a series_.
///
/// Examples how filtering works:
/// - `...[E5]` - Download the fifth episode.
/// - `...[S1]` - Download the full first season.
/// - `...[-S2]` - Download all seasons up to and including season 2.
/// - `...[S3E4-]` - Download all episodes from and including season 3, episode 4.
/// - `...[S1E4-S3]` - Download all episodes from and including season 1, episode 4, until and including season 3.
/// - `...[S3,S5]` - Download season 3 and 5.
/// - `...[S1-S3,S4E2-S4E6]` - Download season 1 to 3 and episode 2 to episode 6 of season 4.

/// In practice, it would look like this: `https://crunchyroll.com/series/12345678/example[S1E5-S3E2]`.
pub async fn parse_url(
    crunchy: &Crunchyroll,
    mut url: String,
    with_filter: bool,
) -> Result<(MediaCollection, UrlFilter)> {
    let url_filter = if with_filter {
        debug!("Url may contain filters");

        // the filter is the last `[...]` group of the url
        let open_index = url.rfind('[').unwrap_or(0);
        let close_index = url.rfind(']').unwrap_or(0);

        // extract the filter pattern and strip it from the url
        let filter = if open_index < close_index {
            let filter = url.as_str()[open_index + 1..close_index].to_string();
            url = url.as_str()[0..open_index].to_string();
            filter
        } else {
            "".to_string()
        };

        let filter_regex = Regex::new(r"((S(?P<from_season>\d+))?(E(?P<from_episode>\d+))?)(((?P<dash>-)((S(?P<to_season>\d+))?(E(?P<to_episode>\d+))?))?)(,|$)").unwrap();

        let mut filters = vec![];

        for capture in filter_regex.captures_iter(&filter) {
            // without a dash the pattern is a single point, so the `to_*`
            // bounds default to the `from_*` values instead of "unbounded"
            let dash = capture.name("dash").is_some();
            let from_episode = capture
                .name("from_episode")
                .map_or(anyhow::Ok(None), |fe| Ok(Some(fe.as_str().parse()?)))?;
            let to_episode = capture
                .name("to_episode")
                .map_or(anyhow::Ok(if dash { None } else { from_episode }), |te| {
                    Ok(Some(te.as_str().parse()?))
                })?;
            let from_season = capture
                .name("from_season")
                .map_or(anyhow::Ok(None), |fs| Ok(Some(fs.as_str().parse()?)))?;
            let to_season = capture
                .name("to_season")
                .map_or(anyhow::Ok(if dash { None } else { from_season }), |ts| {
                    Ok(Some(ts.as_str().parse()?))
                })?;

            filters.push(InnerUrlFilter {
                from_episode,
                to_episode,
                from_season,
                to_season,
            })
        }

        let url_filter = UrlFilter { inner: filters };

        debug!("Url find: {:?}", url_filter);

        url_filter
    } else {
        UrlFilter::default()
    };

    // check if the url is the old series/episode scheme which still occurs in some places (like the
    // rss)
    let old_url_regex = Regex::new(r"https?://(www\.)?crunchyroll\.com/.+").unwrap();
    if old_url_regex.is_match(&url) {
        debug!("Detected maybe old url");
        // replace the 'http' prefix with 'https' as http is not supported by the reqwest client
        if url.starts_with("http://") {
            url.replace_range(0..4, "https")
        }
        // the old url redirects to the new url. request the old url, follow the redirects and
        // extract the final url
        url = crunchy.client().get(&url).send().await?.url().to_string()
    }

    let parsed_url = crunchyroll_rs::parse_url(url).ok_or(anyhow!("Invalid url"))?;
    debug!("Url type: {:?}", parsed_url);
    let media_collection = match parsed_url {
        UrlType::Series(id)
        | UrlType::MovieListing(id)
        | UrlType::EpisodeOrMovie(id)
        | UrlType::MusicVideo(id)
        | UrlType::Concert(id) => crunchy.media_collection_from_id(id).await?,
    };

    Ok((media_collection, url_filter))
}
|
||||
|
||||
/// Parse a resolution given as a [`String`] to a [`crunchyroll_rs::media::Resolution`].
|
||||
pub fn parse_resolution(mut resolution: String) -> Result<Resolution> {
|
||||
resolution = resolution.to_lowercase();
|
||||
|
||||
if resolution == "best" {
|
||||
Ok(Resolution {
|
||||
width: u64::MAX,
|
||||
height: u64::MAX,
|
||||
})
|
||||
} else if resolution == "worst" {
|
||||
Ok(Resolution {
|
||||
width: u64::MIN,
|
||||
height: u64::MIN,
|
||||
})
|
||||
} else if resolution.ends_with('p') {
|
||||
let without_p = resolution.as_str()[0..resolution.len() - 1]
|
||||
.parse()
|
||||
.map_err(|_| anyhow!("Could not find resolution"))?;
|
||||
Ok(Resolution {
|
||||
width: without_p * 16 / 9,
|
||||
height: without_p,
|
||||
})
|
||||
} else if let Some((w, h)) = resolution.split_once('x') {
|
||||
Ok(Resolution {
|
||||
width: w
|
||||
.parse()
|
||||
.map_err(|_| anyhow!("Could not find resolution"))?,
|
||||
height: h
|
||||
.parse()
|
||||
.map_err(|_| anyhow!("Could not find resolution"))?,
|
||||
})
|
||||
} else {
|
||||
bail!("Could not find resolution")
|
||||
}
|
||||
}
|
||||
|
||||
/// Dirty implementation of [`f32::fract`] with more accuracy.
|
||||
pub fn fract(input: f32) -> f32 {
|
||||
if input.fract() == 0.0 {
|
||||
return 0.0;
|
||||
}
|
||||
format!("0.{}", input.to_string().split('.').last().unwrap())
|
||||
.parse::<f32>()
|
||||
.unwrap()
|
||||
}
|
||||
73
crunchy-cli-core/src/utils/rate_limit.rs
Normal file
73
crunchy-cli-core/src/utils/rate_limit.rs
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
use async_speed_limit::Limiter;
|
||||
use crunchyroll_rs::error::Error;
|
||||
use futures_util::TryStreamExt;
|
||||
use reqwest::{Client, Request, Response, ResponseBuilderExt};
|
||||
use std::future::Future;
|
||||
use std::io;
|
||||
use std::pin::Pin;
|
||||
use std::sync::Arc;
|
||||
use std::task::{Context, Poll};
|
||||
use tower_service::Service;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct RateLimiterService {
|
||||
client: Arc<Client>,
|
||||
rate_limiter: Limiter,
|
||||
}
|
||||
|
||||
impl RateLimiterService {
|
||||
pub fn new(bytes: u32, client: Client) -> Self {
|
||||
Self {
|
||||
client: Arc::new(client),
|
||||
rate_limiter: Limiter::new(bytes as f64),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Service<Request> for RateLimiterService {
|
||||
type Response = Response;
|
||||
type Error = Error;
|
||||
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
|
||||
|
||||
fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, req: Request) -> Self::Future {
|
||||
let client = self.client.clone();
|
||||
let rate_limiter = self.rate_limiter.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
let mut body = vec![];
|
||||
let res = client.execute(req).await?;
|
||||
let _url = res.url().clone().to_string();
|
||||
let url = _url.as_str();
|
||||
|
||||
let mut http_res = http::Response::builder()
|
||||
.url(res.url().clone())
|
||||
.status(res.status())
|
||||
.version(res.version());
|
||||
*http_res.headers_mut().unwrap() = res.headers().clone();
|
||||
http_res
|
||||
.extensions_ref()
|
||||
.unwrap()
|
||||
.clone_from(&res.extensions());
|
||||
|
||||
let limiter = rate_limiter.limit(
|
||||
res.bytes_stream()
|
||||
.map_err(io::Error::other)
|
||||
.into_async_read(),
|
||||
);
|
||||
|
||||
futures_util::io::copy(limiter, &mut body)
|
||||
.await
|
||||
.map_err(|e| Error::Request {
|
||||
url: url.to_string(),
|
||||
status: None,
|
||||
message: e.to_string(),
|
||||
})?;
|
||||
|
||||
Ok(Response::from(http_res.body(body).unwrap()))
|
||||
})
|
||||
}
|
||||
}
|
||||
432
crunchy-cli-core/src/utils/sync.rs
Normal file
432
crunchy-cli-core/src/utils/sync.rs
Normal file
|
|
@ -0,0 +1,432 @@
|
|||
use std::io::Read;
|
||||
use std::process::Stdio;
|
||||
use std::{
|
||||
cmp,
|
||||
collections::{HashMap, HashSet},
|
||||
mem,
|
||||
ops::Not,
|
||||
path::Path,
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use chrono::TimeDelta;
|
||||
use crunchyroll_rs::Locale;
|
||||
use log::debug;
|
||||
use tempfile::TempPath;
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use rusty_chromaprint::{Configuration, Fingerprinter};
|
||||
|
||||
use super::fmt::format_time_delta;
|
||||
|
||||
pub struct SyncAudio {
|
||||
pub format_id: usize,
|
||||
pub path: TempPath,
|
||||
pub locale: Locale,
|
||||
pub sample_rate: u32,
|
||||
pub video_idx: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct TimeRange {
|
||||
start: f64,
|
||||
end: f64,
|
||||
}
|
||||
|
||||
pub fn sync_audios(
|
||||
available_audios: &Vec<SyncAudio>,
|
||||
sync_tolerance: u32,
|
||||
sync_precision: u32,
|
||||
) -> Result<Option<HashMap<usize, TimeDelta>>> {
|
||||
let mut result: HashMap<usize, TimeDelta> = HashMap::new();
|
||||
|
||||
let mut sync_audios = vec![];
|
||||
let mut chromaprints = HashMap::new();
|
||||
let mut formats = HashSet::new();
|
||||
for audio in available_audios {
|
||||
if formats.contains(&audio.format_id) {
|
||||
continue;
|
||||
}
|
||||
formats.insert(audio.format_id);
|
||||
sync_audios.push((audio.format_id, &audio.path, audio.sample_rate));
|
||||
chromaprints.insert(
|
||||
audio.format_id,
|
||||
generate_chromaprint(
|
||||
&audio.path,
|
||||
audio.sample_rate,
|
||||
&TimeDelta::zero(),
|
||||
&TimeDelta::zero(),
|
||||
&TimeDelta::zero(),
|
||||
)?,
|
||||
);
|
||||
}
|
||||
sync_audios.sort_by_key(|sync_audio| chromaprints.get(&sync_audio.0).unwrap().len());
|
||||
|
||||
let base_audio = sync_audios.remove(0);
|
||||
|
||||
let mut start = f64::MAX;
|
||||
let mut end = f64::MIN;
|
||||
let mut initial_offsets = HashMap::new();
|
||||
for audio in &sync_audios {
|
||||
debug!(
|
||||
"Initial comparison of format {} to {}",
|
||||
audio.0, &base_audio.0
|
||||
);
|
||||
|
||||
let (lhs_ranges, rhs_ranges) = compare_chromaprints(
|
||||
chromaprints.get(&base_audio.0).unwrap(),
|
||||
chromaprints.get(&audio.0).unwrap(),
|
||||
sync_tolerance,
|
||||
);
|
||||
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
|
||||
bail!(
|
||||
"Failed to sync videos, couldn't find matching audio parts between format {} and {}",
|
||||
base_audio.0 + 1,
|
||||
audio.0 + 1
|
||||
);
|
||||
}
|
||||
let lhs_range = lhs_ranges[0];
|
||||
let rhs_range = rhs_ranges[0];
|
||||
start = start.min(lhs_range.start);
|
||||
end = end.max(lhs_range.end);
|
||||
start = start.min(rhs_range.start);
|
||||
end = end.max(rhs_range.end);
|
||||
let offset = TimeDelta::milliseconds(((rhs_range.start - lhs_range.start) * 1000.0) as i64);
|
||||
initial_offsets.insert(audio.0, TimeDelta::zero().checked_sub(&offset).unwrap());
|
||||
debug!(
|
||||
"Found initial offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
|
||||
offset.num_milliseconds(),
|
||||
lhs_range.start,
|
||||
lhs_range.end,
|
||||
lhs_range.end - lhs_range.start,
|
||||
rhs_range.start,
|
||||
rhs_range.end,
|
||||
rhs_range.end - rhs_range.start,
|
||||
audio.0,
|
||||
base_audio.0
|
||||
);
|
||||
}
|
||||
|
||||
debug!(
|
||||
"Found matching audio parts at {} - {}, narrowing search",
|
||||
start, end
|
||||
);
|
||||
|
||||
let start = TimeDelta::milliseconds((start * 1000.0) as i64 - 20000);
|
||||
let end = TimeDelta::milliseconds((end * 1000.0) as i64 + 20000);
|
||||
|
||||
for sync_audio in &sync_audios {
|
||||
let chromaprint = generate_chromaprint(
|
||||
sync_audio.1,
|
||||
sync_audio.2,
|
||||
&start,
|
||||
&end,
|
||||
initial_offsets.get(&sync_audio.0).unwrap(),
|
||||
)?;
|
||||
chromaprints.insert(sync_audio.0, chromaprint);
|
||||
}
|
||||
|
||||
let mut runs: HashMap<usize, i64> = HashMap::new();
|
||||
let iterator_range_limits: i64 = 2 ^ sync_precision as i64;
|
||||
for i in -iterator_range_limits..=iterator_range_limits {
|
||||
let base_offset = TimeDelta::milliseconds(
|
||||
((0.128 / iterator_range_limits as f64 * i as f64) * 1000.0) as i64,
|
||||
);
|
||||
chromaprints.insert(
|
||||
base_audio.0,
|
||||
generate_chromaprint(base_audio.1, base_audio.2, &start, &end, &base_offset)?,
|
||||
);
|
||||
for audio in &sync_audios {
|
||||
let initial_offset = initial_offsets.get(&audio.0).copied().unwrap();
|
||||
let offset = find_offset(
|
||||
(&base_audio.0, chromaprints.get(&base_audio.0).unwrap()),
|
||||
&base_offset,
|
||||
(&audio.0, chromaprints.get(&audio.0).unwrap()),
|
||||
&initial_offset,
|
||||
&start,
|
||||
sync_tolerance,
|
||||
);
|
||||
if offset.is_none() {
|
||||
continue;
|
||||
}
|
||||
let offset = offset.unwrap();
|
||||
|
||||
result.insert(
|
||||
audio.0,
|
||||
result
|
||||
.get(&audio.0)
|
||||
.copied()
|
||||
.unwrap_or_default()
|
||||
.checked_add(&offset)
|
||||
.unwrap(),
|
||||
);
|
||||
runs.insert(audio.0, runs.get(&audio.0).copied().unwrap_or_default() + 1);
|
||||
}
|
||||
}
|
||||
let mut result: HashMap<usize, TimeDelta> = result
|
||||
.iter()
|
||||
.map(|(format_id, offset)| {
|
||||
(
|
||||
*format_id,
|
||||
TimeDelta::milliseconds(
|
||||
offset.num_milliseconds() / runs.get(format_id).copied().unwrap(),
|
||||
),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
result.insert(base_audio.0, TimeDelta::milliseconds(0));
|
||||
|
||||
Ok(Some(result))
|
||||
}
|
||||
|
||||
fn find_offset(
|
||||
lhs: (&usize, &Vec<u32>),
|
||||
lhs_shift: &TimeDelta,
|
||||
rhs: (&usize, &Vec<u32>),
|
||||
rhs_shift: &TimeDelta,
|
||||
start: &TimeDelta,
|
||||
sync_tolerance: u32,
|
||||
) -> Option<TimeDelta> {
|
||||
let (lhs_ranges, rhs_ranges) = compare_chromaprints(lhs.1, rhs.1, sync_tolerance);
|
||||
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
|
||||
return None;
|
||||
}
|
||||
let lhs_range = lhs_ranges[0];
|
||||
let rhs_range = rhs_ranges[0];
|
||||
let offset = rhs_range.end - lhs_range.end;
|
||||
let offset = TimeDelta::milliseconds((offset * 1000.0) as i64)
|
||||
.checked_add(lhs_shift)?
|
||||
.checked_sub(rhs_shift)?;
|
||||
debug!(
|
||||
"Found offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
|
||||
offset.num_milliseconds(),
|
||||
lhs_range.start + start.num_milliseconds() as f64 / 1000.0,
|
||||
lhs_range.end + start.num_milliseconds() as f64 / 1000.0,
|
||||
lhs_range.end - lhs_range.start,
|
||||
rhs_range.start + start.num_milliseconds() as f64 / 1000.0,
|
||||
rhs_range.end + start.num_milliseconds() as f64 / 1000.0,
|
||||
rhs_range.end - rhs_range.start,
|
||||
rhs.0,
|
||||
lhs.0
|
||||
);
|
||||
Some(offset)
|
||||
}
|
||||
|
||||
fn generate_chromaprint(
|
||||
input_file: &Path,
|
||||
sample_rate: u32,
|
||||
start: &TimeDelta,
|
||||
end: &TimeDelta,
|
||||
offset: &TimeDelta,
|
||||
) -> Result<Vec<u32>> {
|
||||
let mut ss_argument: &TimeDelta = &start.checked_sub(offset).unwrap();
|
||||
let mut offset_argument = &TimeDelta::zero();
|
||||
if *offset < TimeDelta::zero() {
|
||||
ss_argument = start;
|
||||
offset_argument = offset;
|
||||
};
|
||||
|
||||
let mut printer = Fingerprinter::new(&Configuration::preset_test1());
|
||||
printer.start(sample_rate, 2)?;
|
||||
|
||||
let mut command = Command::new("ffmpeg");
|
||||
command
|
||||
.arg("-hide_banner")
|
||||
.arg("-y")
|
||||
.args(["-ss", format_time_delta(ss_argument).as_str()]);
|
||||
|
||||
if end.is_zero().not() {
|
||||
command.args(["-to", format_time_delta(end).as_str()]);
|
||||
}
|
||||
|
||||
command
|
||||
.args(["-itsoffset", format_time_delta(offset_argument).as_str()])
|
||||
.args(["-i", input_file.to_string_lossy().to_string().as_str()])
|
||||
.args(["-ac", "2"])
|
||||
.args([
|
||||
"-f",
|
||||
if cfg!(target_endian = "big") {
|
||||
"s16be"
|
||||
} else {
|
||||
"s16le"
|
||||
},
|
||||
])
|
||||
.arg("-");
|
||||
|
||||
let mut handle = command
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
// the stdout is read in chunks because keeping all the raw audio data in memory would take up
|
||||
// a significant amount of space
|
||||
let mut stdout = handle.stdout.take().unwrap();
|
||||
let mut buf: [u8; 128_000] = [0; 128_000];
|
||||
while handle.try_wait()?.is_none() {
|
||||
loop {
|
||||
let read_bytes = stdout.read(&mut buf)?;
|
||||
if read_bytes == 0 {
|
||||
break;
|
||||
}
|
||||
let data: [i16; 64_000] = unsafe { mem::transmute(buf) };
|
||||
printer.consume(&data[0..(read_bytes / 2)])
|
||||
}
|
||||
}
|
||||
|
||||
if !handle.wait()?.success() {
|
||||
bail!("{}", std::io::read_to_string(handle.stderr.unwrap())?)
|
||||
}
|
||||
|
||||
printer.finish();
|
||||
return Ok(printer.fingerprint().into());
|
||||
}
|
||||
|
||||
fn compare_chromaprints(
|
||||
lhs_chromaprint: &Vec<u32>,
|
||||
rhs_chromaprint: &Vec<u32>,
|
||||
sync_tolerance: u32,
|
||||
) -> (Vec<TimeRange>, Vec<TimeRange>) {
|
||||
let lhs_inverse_index = create_inverse_index(lhs_chromaprint);
|
||||
let rhs_inverse_index = create_inverse_index(rhs_chromaprint);
|
||||
|
||||
let mut possible_shifts = HashSet::new();
|
||||
for lhs_pair in lhs_inverse_index {
|
||||
let original_point = lhs_pair.0;
|
||||
for i in -2..=2 {
|
||||
let modified_point = (original_point as i32 + i) as u32;
|
||||
if rhs_inverse_index.contains_key(&modified_point) {
|
||||
let rhs_index = rhs_inverse_index.get(&modified_point).copied().unwrap();
|
||||
possible_shifts.insert(rhs_index as i32 - lhs_pair.1 as i32);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut all_lhs_time_ranges = vec![];
|
||||
let mut all_rhs_time_ranges = vec![];
|
||||
for shift_amount in possible_shifts {
|
||||
let time_range_pair = find_time_ranges(
|
||||
lhs_chromaprint,
|
||||
rhs_chromaprint,
|
||||
shift_amount,
|
||||
sync_tolerance,
|
||||
);
|
||||
if time_range_pair.is_none() {
|
||||
continue;
|
||||
}
|
||||
let (mut lhs_time_ranges, mut rhs_time_ranges) = time_range_pair.unwrap();
|
||||
let mut lhs_time_ranges: Vec<TimeRange> = lhs_time_ranges
|
||||
.drain(..)
|
||||
.filter(|time_range| {
|
||||
(20.0 < (time_range.end - time_range.start))
|
||||
&& ((time_range.end - time_range.start) < 180.0)
|
||||
&& time_range.end > 0.0
|
||||
})
|
||||
.collect();
|
||||
lhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
|
||||
let mut rhs_time_ranges: Vec<TimeRange> = rhs_time_ranges
|
||||
.drain(..)
|
||||
.filter(|time_range| {
|
||||
(20.0 < (time_range.end - time_range.start))
|
||||
&& ((time_range.end - time_range.start) < 180.0)
|
||||
&& time_range.end > 0.0
|
||||
})
|
||||
.collect();
|
||||
rhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
|
||||
if lhs_time_ranges.is_empty() || rhs_time_ranges.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
all_lhs_time_ranges.push(lhs_time_ranges[0]);
|
||||
all_rhs_time_ranges.push(rhs_time_ranges[0]);
|
||||
}
|
||||
all_lhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
|
||||
all_lhs_time_ranges.reverse();
|
||||
all_rhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
|
||||
all_rhs_time_ranges.reverse();
|
||||
|
||||
(all_lhs_time_ranges, all_rhs_time_ranges)
|
||||
}
|
||||
|
||||
fn create_inverse_index(chromaprint: &Vec<u32>) -> HashMap<u32, usize> {
|
||||
let mut inverse_index = HashMap::with_capacity(chromaprint.capacity());
|
||||
for (i, fingerprint) in chromaprint.iter().enumerate().take(chromaprint.capacity()) {
|
||||
inverse_index.insert(*fingerprint, i);
|
||||
}
|
||||
inverse_index
|
||||
}
|
||||
|
||||
fn find_time_ranges(
|
||||
lhs_chromaprint: &[u32],
|
||||
rhs_chromaprint: &[u32],
|
||||
shift_amount: i32,
|
||||
sync_tolerance: u32,
|
||||
) -> Option<(Vec<TimeRange>, Vec<TimeRange>)> {
|
||||
let mut lhs_shift: i32 = 0;
|
||||
let mut rhs_shift: i32 = 0;
|
||||
if shift_amount < 0 {
|
||||
lhs_shift -= shift_amount;
|
||||
} else {
|
||||
rhs_shift += shift_amount;
|
||||
}
|
||||
|
||||
let mut lhs_matching_timestamps = vec![];
|
||||
let mut rhs_matching_timestamps = vec![];
|
||||
let upper_limit =
|
||||
cmp::min(lhs_chromaprint.len(), rhs_chromaprint.len()) as i32 - shift_amount.abs();
|
||||
|
||||
for i in 0..upper_limit {
|
||||
let lhs_position = i + lhs_shift;
|
||||
let rhs_position = i + rhs_shift;
|
||||
let difference = (lhs_chromaprint[lhs_position as usize]
|
||||
^ rhs_chromaprint[rhs_position as usize])
|
||||
.count_ones();
|
||||
|
||||
if difference > sync_tolerance {
|
||||
continue;
|
||||
}
|
||||
|
||||
lhs_matching_timestamps.push(lhs_position as f64 * 0.128);
|
||||
rhs_matching_timestamps.push(rhs_position as f64 * 0.128);
|
||||
}
|
||||
lhs_matching_timestamps.push(f64::MAX);
|
||||
rhs_matching_timestamps.push(f64::MAX);
|
||||
|
||||
let lhs_time_ranges = timestamps_to_ranges(lhs_matching_timestamps);
|
||||
lhs_time_ranges.as_ref()?;
|
||||
let lhs_time_ranges = lhs_time_ranges.unwrap();
|
||||
let rhs_time_ranges = timestamps_to_ranges(rhs_matching_timestamps).unwrap();
|
||||
|
||||
Some((lhs_time_ranges, rhs_time_ranges))
|
||||
}
|
||||
|
||||
fn timestamps_to_ranges(mut timestamps: Vec<f64>) -> Option<Vec<TimeRange>> {
|
||||
if timestamps.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
timestamps.sort_by(|a, b| a.total_cmp(b));
|
||||
|
||||
let mut time_ranges = vec![];
|
||||
let mut current_range = TimeRange {
|
||||
start: timestamps[0],
|
||||
end: timestamps[0],
|
||||
};
|
||||
|
||||
for i in 0..timestamps.len() - 1 {
|
||||
let current = timestamps[i];
|
||||
let next = timestamps[i + 1];
|
||||
if next - current <= 1.0 {
|
||||
current_range.end = next;
|
||||
continue;
|
||||
}
|
||||
|
||||
time_ranges.push(current_range);
|
||||
current_range.start = next;
|
||||
current_range.end = next;
|
||||
}
|
||||
if !time_ranges.is_empty() {
|
||||
Some(time_ranges)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
46
crunchy-cli-core/src/utils/video.rs
Normal file
46
crunchy-cli-core/src/utils/video.rs
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
use anyhow::{bail, Result};
|
||||
use crunchyroll_rs::media::{Resolution, Stream, StreamData};
|
||||
use crunchyroll_rs::Locale;
|
||||
|
||||
pub async fn stream_data_from_stream(
|
||||
stream: &Stream,
|
||||
resolution: &Resolution,
|
||||
hardsub_subtitle: Option<Locale>,
|
||||
) -> Result<Option<(StreamData, StreamData, bool)>> {
|
||||
let (hardsub_locale, mut contains_hardsub) = if hardsub_subtitle.is_some() {
|
||||
(hardsub_subtitle, true)
|
||||
} else {
|
||||
(None, false)
|
||||
};
|
||||
|
||||
let (mut videos, mut audios) = match stream.stream_data(hardsub_locale).await {
|
||||
Ok(data) => data,
|
||||
Err(e) => {
|
||||
// the error variant is only `crunchyroll_rs::error::Error::Input` when the requested
|
||||
// hardsub is not available
|
||||
if let crunchyroll_rs::error::Error::Input { .. } = e {
|
||||
contains_hardsub = false;
|
||||
stream.stream_data(None).await?
|
||||
} else {
|
||||
bail!(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
.unwrap();
|
||||
|
||||
if videos.iter().any(|v| v.drm.is_some()) || audios.iter().any(|v| v.drm.is_some()) {
|
||||
bail!("Stream is DRM protected")
|
||||
}
|
||||
|
||||
videos.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
|
||||
audios.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
|
||||
|
||||
let video_variant = match resolution.height {
|
||||
u64::MAX => Some(videos.into_iter().next().unwrap()),
|
||||
u64::MIN => Some(videos.into_iter().last().unwrap()),
|
||||
_ => videos
|
||||
.into_iter()
|
||||
.find(|v| resolution.height == v.resolution().unwrap().height),
|
||||
};
|
||||
Ok(video_variant.map(|v| (v, audios.first().unwrap().clone(), contains_hardsub)))
|
||||
}
|
||||
347
crunchyroll.go
347
crunchyroll.go
|
|
@ -1,347 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// LOCALE represents a locale / language
|
||||
type LOCALE string
|
||||
|
||||
const (
|
||||
JP LOCALE = "ja-JP"
|
||||
US = "en-US"
|
||||
LA = "es-LA"
|
||||
ES = "es-ES"
|
||||
FR = "fr-FR"
|
||||
BR = "pt-BR"
|
||||
IT = "it-IT"
|
||||
DE = "de-DE"
|
||||
RU = "ru-RU"
|
||||
ME = "ar-ME"
|
||||
)
|
||||
|
||||
type Crunchyroll struct {
|
||||
// Client is the http.Client to perform all requests over
|
||||
Client *http.Client
|
||||
// Locale specifies in which language all results should be returned / requested
|
||||
Locale LOCALE
|
||||
// SessionID is the crunchyroll session id which was used for authentication
|
||||
SessionID string
|
||||
|
||||
// Config stores parameters which are needed by some api calls
|
||||
Config struct {
|
||||
TokenType string
|
||||
AccessToken string
|
||||
|
||||
CountryCode string
|
||||
Premium bool
|
||||
Channel string
|
||||
Policy string
|
||||
Signature string
|
||||
KeyPairID string
|
||||
AccountID string
|
||||
ExternalID string
|
||||
MaturityRating string
|
||||
}
|
||||
}
|
||||
|
||||
// LoginWithCredentials logs in via crunchyroll email and password
|
||||
func LoginWithCredentials(email string, password string, locale LOCALE, client *http.Client) (*Crunchyroll, error) {
|
||||
sessionIDEndpoint := fmt.Sprintf("https://api.crunchyroll.com/start_session.0.json?version=1.0&access_token=%s&device_type=%s&device_id=%s",
|
||||
"LNDJgOit5yaRIWN", "com.crunchyroll.windows.desktop", "Az2srGnChW65fuxYz2Xxl1GcZQgtGgI")
|
||||
sessResp, err := client.Get(sessionIDEndpoint)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer sessResp.Body.Close()
|
||||
|
||||
var data map[string]interface{}
|
||||
body, _ := ioutil.ReadAll(sessResp.Body)
|
||||
json.Unmarshal(body, &data)
|
||||
|
||||
sessionID := data["data"].(map[string]interface{})["session_id"].(string)
|
||||
|
||||
loginEndpoint := "https://api.crunchyroll.com/login.0.json"
|
||||
authValues := url.Values{}
|
||||
authValues.Set("session_id", sessionID)
|
||||
authValues.Set("account", email)
|
||||
authValues.Set("password", password)
|
||||
client.Post(loginEndpoint, "application/x-www-form-urlencoded", bytes.NewBufferString(authValues.Encode()))
|
||||
|
||||
return LoginWithSessionID(sessionID, locale, client)
|
||||
}
|
||||
|
||||
// LoginWithSessionID logs in via a crunchyroll session id.
|
||||
// Session ids are automatically generated as a cookie when visiting https://www.crunchyroll.com
|
||||
func LoginWithSessionID(sessionID string, locale LOCALE, client *http.Client) (*Crunchyroll, error) {
|
||||
crunchy := &Crunchyroll{
|
||||
Client: client,
|
||||
Locale: locale,
|
||||
SessionID: sessionID,
|
||||
}
|
||||
var endpoint string
|
||||
var err error
|
||||
var resp *http.Response
|
||||
var jsonBody map[string]interface{}
|
||||
|
||||
// start session
|
||||
endpoint = fmt.Sprintf("https://api.crunchyroll.com/start_session.0.json?session_id=%s",
|
||||
sessionID)
|
||||
resp, err = client.Get(endpoint)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
if _, ok := jsonBody["message"]; ok {
|
||||
return nil, errors.New("invalid session id")
|
||||
}
|
||||
data := jsonBody["data"].(map[string]interface{})
|
||||
|
||||
crunchy.Config.CountryCode = data["country_code"].(string)
|
||||
user := data["user"]
|
||||
if user == nil {
|
||||
return nil, errors.New("invalid session id, user is not logged in")
|
||||
}
|
||||
if user.(map[string]interface{})["premium"] == "" {
|
||||
crunchy.Config.Premium = false
|
||||
crunchy.Config.Channel = "-"
|
||||
} else {
|
||||
crunchy.Config.Premium = true
|
||||
crunchy.Config.Channel = "crunchyroll"
|
||||
}
|
||||
|
||||
var etpRt string
|
||||
for _, cookie := range resp.Cookies() {
|
||||
if cookie.Name == "etp_rt" {
|
||||
etpRt = cookie.Value
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// token
|
||||
endpoint = "https://beta-api.crunchyroll.com/auth/v1/token"
|
||||
grantType := url.Values{}
|
||||
grantType.Set("grant_type", "etp_rt_cookie")
|
||||
|
||||
authRequest, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewBufferString(grantType.Encode()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
authRequest.Header.Add("Authorization", "Basic bm9haWhkZXZtXzZpeWcwYThsMHE6")
|
||||
authRequest.Header.Add("Content-Type", "application/x-www-form-urlencoded")
|
||||
authRequest.AddCookie(&http.Cookie{
|
||||
Name: "session_id",
|
||||
Value: sessionID,
|
||||
})
|
||||
authRequest.AddCookie(&http.Cookie{
|
||||
Name: "etp_rt",
|
||||
Value: etpRt,
|
||||
})
|
||||
|
||||
resp, err = client.Do(authRequest)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
crunchy.Config.TokenType = jsonBody["token_type"].(string)
|
||||
crunchy.Config.AccessToken = jsonBody["access_token"].(string)
|
||||
|
||||
// index
|
||||
endpoint = "https://beta-api.crunchyroll.com/index/v2"
|
||||
resp, err = crunchy.request(endpoint)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
cms := jsonBody["cms"].(map[string]interface{})
|
||||
|
||||
crunchy.Config.Policy = cms["policy"].(string)
|
||||
crunchy.Config.Signature = cms["signature"].(string)
|
||||
crunchy.Config.KeyPairID = cms["key_pair_id"].(string)
|
||||
|
||||
// me
|
||||
endpoint = "https://beta-api.crunchyroll.com/accounts/v1/me"
|
||||
resp, err = crunchy.request(endpoint)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
crunchy.Config.AccountID = jsonBody["account_id"].(string)
|
||||
crunchy.Config.ExternalID = jsonBody["external_id"].(string)
|
||||
|
||||
//profile
|
||||
endpoint = "https://beta-api.crunchyroll.com/accounts/v1/me/profile"
|
||||
resp, err = crunchy.request(endpoint)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
crunchy.Config.MaturityRating = jsonBody["maturity_rating"].(string)
|
||||
|
||||
return crunchy, nil
|
||||
}
|
||||
|
||||
// request is a base function which handles api requests
|
||||
func (c *Crunchyroll) request(endpoint string) (*http.Response, error) {
|
||||
req, err := http.NewRequest(http.MethodGet, endpoint, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Add("Authorization", fmt.Sprintf("%s %s", c.Config.TokenType, c.Config.AccessToken))
|
||||
|
||||
resp, err := c.Client.Do(req)
|
||||
if err == nil {
|
||||
bodyAsBytes, _ := ioutil.ReadAll(resp.Body)
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode == http.StatusUnauthorized {
|
||||
return nil, &AccessError{
|
||||
URL: endpoint,
|
||||
Body: bodyAsBytes,
|
||||
}
|
||||
} else {
|
||||
var errStruct struct {
|
||||
Message string `json:"message"`
|
||||
}
|
||||
json.NewDecoder(bytes.NewBuffer(bodyAsBytes)).Decode(&errStruct)
|
||||
if errStruct.Message != "" {
|
||||
return nil, &AccessError{
|
||||
URL: endpoint,
|
||||
Body: bodyAsBytes,
|
||||
Message: errStruct.Message,
|
||||
}
|
||||
}
|
||||
}
|
||||
resp.Body = ioutil.NopCloser(bytes.NewBuffer(bodyAsBytes))
|
||||
}
|
||||
return resp, err
|
||||
}
|
||||
|
||||
// Search searches a query and returns all found series and movies within the given limit
|
||||
func (c *Crunchyroll) Search(query string, limit uint) (s []*Series, m []*Movie, err error) {
|
||||
searchEndpoint := fmt.Sprintf("https://beta-api.crunchyroll.com/content/v1/search?q=%s&n=%d&type=&locale=%s",
|
||||
query, limit, c.Locale)
|
||||
resp, err := c.request(searchEndpoint)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
for _, item := range jsonBody["items"].([]interface{}) {
|
||||
item := item.(map[string]interface{})
|
||||
if item["total"].(float64) > 0 {
|
||||
switch item["type"] {
|
||||
case "series":
|
||||
for _, series := range item["items"].([]interface{}) {
|
||||
series2 := &Series{
|
||||
crunchy: c,
|
||||
}
|
||||
if err := decodeMapToStruct(series, series2); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if err := decodeMapToStruct(series.(map[string]interface{})["series_metadata"].(map[string]interface{}), series2); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
s = append(s, series2)
|
||||
}
|
||||
case "movie_listing":
|
||||
for _, movie := range item["items"].([]interface{}) {
|
||||
movie2 := &Movie{
|
||||
crunchy: c,
|
||||
}
|
||||
if err := decodeMapToStruct(movie, movie2); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
m = append(m, movie2)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return s, m, nil
|
||||
}
|
||||
|
||||
// FindVideo fins a Video (Season or Movie) by a crunchyroll link
|
||||
// e.g. https://www.crunchyroll.com/darling-in-the-franxx
|
||||
func (c *Crunchyroll) FindVideo(seriesUrl string) (Video, error) {
|
||||
pattern := regexp.MustCompile(`(?m)^https?://(www\.)?crunchyroll\.com(/\w{2}(-\w{2})?)?/(?P<series>[^/]+)/?$`)
|
||||
if urlMatch := pattern.FindAllStringSubmatch(seriesUrl, -1); len(urlMatch) != 0 {
|
||||
groups := regexGroups(urlMatch, pattern.SubexpNames()...)
|
||||
title, ok := groups["series"]
|
||||
if !ok {
|
||||
return nil, errors.New("series could not be found")
|
||||
}
|
||||
|
||||
s, m, err := c.Search(title, 1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(s) > 0 {
|
||||
return s[0], nil
|
||||
} else if len(m) > 0 {
|
||||
return m[0], nil
|
||||
}
|
||||
return nil, errors.New("no series or movie could be found")
|
||||
}
|
||||
|
||||
return nil, errors.New("invalid url")
|
||||
}
|
||||
|
||||
// FindEpisode finds an episode by its crunchyroll link
|
||||
// e.g. https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575
|
||||
func (c *Crunchyroll) FindEpisode(url string) ([]*Episode, error) {
|
||||
pattern := regexp.MustCompile(`(?m)^https?://(www\.)?crunchyroll\.com(/\w{2}(-\w{2})?)?/(?P<series>[^/]+)/episode-\d+-(?P<title>\D+).*`)
|
||||
if urlMatch := pattern.FindAllStringSubmatch(url, -1); len(urlMatch) != 0 {
|
||||
groups := regexGroups(urlMatch, pattern.SubexpNames()...)
|
||||
var slugTitle string
|
||||
var ok bool
|
||||
if slugTitle, ok = groups["title"]; !ok {
|
||||
return nil, errors.New("invalid url")
|
||||
}
|
||||
slugTitle = strings.TrimSuffix(slugTitle, "-")
|
||||
video, err := c.FindVideo(fmt.Sprintf("https://www.crunchyroll.com/%s", groups["series"]))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
seasons, err := video.(*Series).Seasons()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var matchingEpisodes []*Episode
|
||||
for _, season := range seasons {
|
||||
episodes, err := season.Episodes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, episode := range episodes {
|
||||
if episode.SlugTitle == slugTitle {
|
||||
matchingEpisodes = append(matchingEpisodes, episode)
|
||||
}
|
||||
}
|
||||
}
|
||||
return matchingEpisodes, nil
|
||||
}
|
||||
|
||||
return nil, errors.New("invalid url")
|
||||
}
|
||||
|
|
@ -1,115 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"github.com/grafov/m3u8"
|
||||
"net/http"
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var (
|
||||
email = os.Getenv("EMAIL")
|
||||
password = os.Getenv("PASSWORD")
|
||||
sessionID = os.Getenv("SESSION_ID")
|
||||
|
||||
crunchy *Crunchyroll
|
||||
season *Season
|
||||
episode *Episode
|
||||
stream *Stream
|
||||
)
|
||||
|
||||
func TestLogin(t *testing.T) {
|
||||
var err error
|
||||
if email != "" && password != "" {
|
||||
crunchy, err = LoginWithCredentials(email, password, DE, http.DefaultClient)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
t.Logf("Logged in with email and password\nAuth: %s %s\nSession id: %s",
|
||||
crunchy.Config.TokenType, crunchy.Config.AccessToken, crunchy.SessionID)
|
||||
} else if sessionID != "" {
|
||||
crunchy, err = LoginWithSessionID(sessionID, DE, http.DefaultClient)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
t.Logf("Logged in with session id\nAuth: %s %s\nSession id: %s",
|
||||
crunchy.Config.TokenType, crunchy.Config.AccessToken, crunchy.SessionID)
|
||||
} else {
|
||||
t.Skipf("email and / or password and session id environtment variables are not set, skipping login. All following test may fail also")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCrunchy_Search(t *testing.T) {
|
||||
series, movies, err := crunchy.Search("movie", 20)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
t.Logf("Found %d series and %d movie(s) for search query `movie`", len(series), len(movies))
|
||||
}
|
||||
|
||||
func TestSeries_Seasons(t *testing.T) {
|
||||
video, err := crunchy.FindVideo("https://www.crunchyroll.com/darling-in-the-franxx")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
series := video.(*Series)
|
||||
seasons, err := series.Seasons()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if len(seasons) > 0 {
|
||||
season = seasons[4]
|
||||
} else {
|
||||
t.Logf("%s has no seasons, some future test will fail", series.Title)
|
||||
}
|
||||
t.Logf("Found %d seasons for series %s", len(seasons), series.Title)
|
||||
}
|
||||
|
||||
func TestCrunchyroll_FindEpisode(t *testing.T) {
|
||||
episodes, err := crunchy.FindEpisode("https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
t.Logf("Found %d episodes for episode %s", len(episodes), "https://www.crunchyroll.com/darling-in-the-franxx/episode-1-alone-and-lonesome-759575")
|
||||
}
|
||||
|
||||
func TestSeason_Episodes(t *testing.T) {
|
||||
episodes, err := season.Episodes()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if len(episodes) > 0 {
|
||||
episode = episodes[0]
|
||||
} else {
|
||||
t.Logf("%s has no episodes, some future test will fail", season.Title)
|
||||
}
|
||||
t.Logf("Found %d episodes for season %s", len(episodes), season.Title)
|
||||
}
|
||||
|
||||
func TestEpisode_Streams(t *testing.T) {
|
||||
streams, err := episode.Streams()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if len(streams) > 0 {
|
||||
stream = streams[0]
|
||||
} else {
|
||||
t.Logf("%s has no streams, some future test will fail", season.Title)
|
||||
}
|
||||
t.Logf("Found %d streams for episode %s", len(streams), season.Title)
|
||||
}
|
||||
|
||||
func TestFormat_Download(t *testing.T) {
|
||||
formats, err := stream.Formats()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
file, err := os.Create("test")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
formats[0].Download(file, func(segment *m3u8.MediaSegment, current, total int, file *os.File, err error) error {
|
||||
t.Logf("Downloaded %.2f%% (%d/%d)", float32(current)/float32(total)*100, current, total)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
138
episode.go
138
episode.go
|
|
@ -1,138 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Episode struct {
|
||||
crunchy *Crunchyroll
|
||||
|
||||
siteCache map[string]interface{}
|
||||
|
||||
ID string `json:"id"`
|
||||
SeriesID string `json:"series_id"`
|
||||
SeriesTitle string `json:"series_title"`
|
||||
SeasonNumber int `json:"season_number"`
|
||||
|
||||
Episode string `json:"episode"`
|
||||
EpisodeNumber int `json:"episode_number"`
|
||||
SequenceNumber float64 `json:"sequence_number"`
|
||||
ProductionEpisodeID string `json:"production_episode_id"`
|
||||
|
||||
Title string `json:"title"`
|
||||
SlugTitle string `json:"slug_title"`
|
||||
Description string `json:"description"`
|
||||
NextEpisodeID string `json:"next_episode_id"`
|
||||
NextEpisodeTitle string `json:"next_episode_title"`
|
||||
|
||||
HDFlag bool `json:"hd_flag"`
|
||||
IsMature bool `json:"is_mature"`
|
||||
MatureBlocked bool `json:"mature_blocked"`
|
||||
|
||||
EpisodeAirDate time.Time `json:"episode_air_date"`
|
||||
|
||||
IsSubbed bool `json:"is_subbed"`
|
||||
IsDubbed bool `json:"is_dubbed"`
|
||||
IsClip bool `json:"is_clip"`
|
||||
SeoTitle string `json:"seo_title"`
|
||||
SeoDescription string `json:"seo_description"`
|
||||
SeasonTags []string `json:"season_tags"`
|
||||
|
||||
AvailableOffline bool `json:"available_offline"`
|
||||
Slug string `json:"slug"`
|
||||
|
||||
Images struct {
|
||||
Thumbnail [][]struct {
|
||||
Width int `json:"width"`
|
||||
Height int `json:"height"`
|
||||
Type string `json:"type"`
|
||||
Source string `json:"source"`
|
||||
} `json:"thumbnail"`
|
||||
} `json:"images"`
|
||||
|
||||
DurationMS int `json:"duration_ms"`
|
||||
IsPremiumOnly bool `json:"is_premium_only"`
|
||||
ListingID string `json:"listing_id"`
|
||||
|
||||
SubtitleLocales []LOCALE `json:"subtitle_locales"`
|
||||
Playback string `json:"playback"`
|
||||
|
||||
AvailabilityNotes string `json:"availability_notes"`
|
||||
|
||||
StreamID string
|
||||
}
|
||||
|
||||
// EpisodeFromID returns an episode by its api id
|
||||
func EpisodeFromID(crunchy *Crunchyroll, id string) (*Episode, error) {
|
||||
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/episodes/%s?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
crunchy.Config.CountryCode,
|
||||
crunchy.Config.MaturityRating,
|
||||
crunchy.Config.Channel,
|
||||
id,
|
||||
crunchy.Locale,
|
||||
crunchy.Config.Signature,
|
||||
crunchy.Config.Policy,
|
||||
crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
episode := &Episode{
|
||||
crunchy: crunchy,
|
||||
}
|
||||
if err := decodeMapToStruct(jsonBody, episode); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if episode.Playback != "" {
|
||||
streamHref := jsonBody["__links__"].(map[string]interface{})["streams"].(map[string]interface{})["href"].(string)
|
||||
if match := regexp.MustCompile(`(?m)^/cms/v2/\S+videos/(\w+)/streams$`).FindAllStringSubmatch(streamHref, -1); len(match) > 0 {
|
||||
episode.StreamID = match[0][1]
|
||||
}
|
||||
}
|
||||
|
||||
return episode, nil
|
||||
}
|
||||
|
||||
// AudioLocale returns the audio locale of the episode.
|
||||
// Every episode in a season (should) have the same audio locale,
|
||||
// so if you want to get the audio locale of a season, just call this method on the first episode of the season.
|
||||
// Otherwise, this function will cause massive heap on a season which many episodes
|
||||
func (e *Episode) AudioLocale() (LOCALE, error) {
|
||||
resp, err := e.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
e.crunchy.Config.CountryCode,
|
||||
e.crunchy.Config.MaturityRating,
|
||||
e.crunchy.Config.Channel,
|
||||
e.StreamID,
|
||||
e.crunchy.Locale,
|
||||
e.crunchy.Config.Signature,
|
||||
e.crunchy.Config.Policy,
|
||||
e.crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
e.siteCache = jsonBody
|
||||
|
||||
return LOCALE(jsonBody["audio_locale"].(string)), nil
|
||||
}
|
||||
|
||||
// Streams returns all streams which are available for the episode
|
||||
func (e *Episode) Streams() ([]*Stream, error) {
|
||||
return fromVideoStreams(e.crunchy, fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
e.crunchy.Config.CountryCode,
|
||||
e.crunchy.Config.MaturityRating,
|
||||
e.crunchy.Config.Channel,
|
||||
e.StreamID,
|
||||
e.crunchy.Locale,
|
||||
e.crunchy.Config.Signature,
|
||||
e.crunchy.Config.Policy,
|
||||
e.crunchy.Config.KeyPairID))
|
||||
}
|
||||
21
error.go
21
error.go
|
|
@ -1,21 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import "fmt"
|
||||
|
||||
// AccessError is an error which will be returned when some special sort of api request fails.
|
||||
// See Crunchyroll.request when the error gets used
|
||||
type AccessError struct {
|
||||
error
|
||||
|
||||
URL string
|
||||
Body []byte
|
||||
Message string
|
||||
}
|
||||
|
||||
func (ae *AccessError) Error() string {
|
||||
if ae.Message == "" {
|
||||
return fmt.Sprintf("Access token invalid for url %s\nBody: %s", ae.URL, string(ae.Body))
|
||||
} else {
|
||||
return ae.Message
|
||||
}
|
||||
}
|
||||
59
flake.lock
generated
Normal file
59
flake.lock
generated
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
{
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1710534455,
|
||||
"narHash": "sha256-huQT4Xs0y4EeFKn2BTBVYgEwJSv8SDlm82uWgMnCMmI=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "9af9c1c87ed3e3ed271934cb896e0cdd33dae212",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"id": "nixpkgs",
|
||||
"ref": "nixpkgs-unstable",
|
||||
"type": "indirect"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixpkgs": "nixpkgs",
|
||||
"utils": "utils"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1710146030,
|
||||
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"id": "flake-utils",
|
||||
"type": "indirect"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
76
flake.nix
Normal file
76
flake.nix
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "flake:nixpkgs/nixpkgs-unstable";
|
||||
utils.url = "flake:flake-utils";
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, utils }: utils.lib.eachDefaultSystem
|
||||
(system:
|
||||
let
|
||||
# enable musl on Linux will trigger a toolchain rebuild
|
||||
# making the build very slow
|
||||
pkgs = import nixpkgs { inherit system; };
|
||||
# if nixpkgs.legacyPackages.${system}.stdenv.hostPlatform.isLinux
|
||||
# then nixpkgs.legacyPackages.${system}.pkgsMusl
|
||||
# else nixpkgs.legacyPackages.${system};
|
||||
|
||||
crunchy-cli = pkgs.rustPlatform.buildRustPackage.override { stdenv = pkgs.clangStdenv; } rec {
|
||||
pname = "crunchy-cli";
|
||||
inherit ((pkgs.lib.importTOML ./Cargo.toml).package) version;
|
||||
|
||||
src = pkgs.lib.cleanSource ./.;
|
||||
|
||||
cargoLock = {
|
||||
lockFile = ./Cargo.lock;
|
||||
allowBuiltinFetchGit = true;
|
||||
};
|
||||
|
||||
buildNoDefaultFeatures = true;
|
||||
buildFeatures = [ "openssl-tls" ];
|
||||
|
||||
nativeBuildInputs = [
|
||||
pkgs.pkg-config
|
||||
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
|
||||
pkgs.xcbuild
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
pkgs.openssl
|
||||
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
|
||||
pkgs.darwin.Security
|
||||
];
|
||||
};
|
||||
in
|
||||
{
|
||||
packages.default = crunchy-cli;
|
||||
|
||||
devShells.default = pkgs.mkShell {
|
||||
packages = with pkgs; [
|
||||
cargo
|
||||
clippy
|
||||
rust-analyzer
|
||||
rustc
|
||||
rustfmt
|
||||
];
|
||||
|
||||
inputsFrom = builtins.attrValues self.packages.${system};
|
||||
|
||||
buildInputs = [
|
||||
pkgs.openssl
|
||||
pkgs.libiconv
|
||||
] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
|
||||
pkgs.darwin.apple_sdk.frameworks.CoreServices
|
||||
pkgs.darwin.Security
|
||||
];
|
||||
|
||||
RUST_SRC_PATH = pkgs.rustPlatform.rustLibSrc;
|
||||
};
|
||||
|
||||
formatter = pkgs.nixpkgs-fmt;
|
||||
}
|
||||
) // {
|
||||
overlays.default = final: prev: {
|
||||
inherit (self.packages.${final.system}) crunchy-cli;
|
||||
};
|
||||
};
|
||||
}
|
||||
224
format.go
224
format.go
|
|
@ -1,224 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"fmt"
|
||||
"github.com/grafov/m3u8"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
)
|
||||
|
||||
const (
|
||||
EPISODE FormatType = "episodes"
|
||||
MOVIE = "movies"
|
||||
)
|
||||
|
||||
type FormatType string
|
||||
type Format struct {
|
||||
crunchy *Crunchyroll
|
||||
|
||||
ID string
|
||||
// FormatType represents if the format parent is an episode or a movie
|
||||
FormatType FormatType
|
||||
Video *m3u8.Variant
|
||||
AudioLocale LOCALE
|
||||
Hardsub LOCALE
|
||||
Subtitles []*Subtitle
|
||||
}
|
||||
|
||||
// Download downloads the format to the given output file (as .ts file).
|
||||
// See Format.DownloadSegments for more information
|
||||
func (f *Format) Download(output *os.File, onSegmentDownload func(segment *m3u8.MediaSegment, current, total int, file *os.File, err error) error) error {
|
||||
downloadDir, err := os.MkdirTemp("", "crunchy_")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer os.RemoveAll(downloadDir)
|
||||
|
||||
if err := f.DownloadSegments(downloadDir, 4, onSegmentDownload); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return f.mergeSegments(downloadDir, output)
|
||||
}
|
||||
|
||||
// DownloadSegments downloads every mpeg transport stream segment to a given directory (more information below).
|
||||
// After every segment download onSegmentDownload will be called with:
|
||||
// the downloaded segment, the current position, the total size of segments to download, the file where the segment content was written to an error (if occurred).
|
||||
// The filename is always <number of downloaded segment>.ts
|
||||
//
|
||||
// Short explanation:
|
||||
// The actual crunchyroll video is split up in multiple segments (or video files) which have to be downloaded and merged after to generate a single video file.
|
||||
// And this function just downloads each of this segment into the given directory.
|
||||
// See https://en.wikipedia.org/wiki/MPEG_transport_stream for more information
|
||||
func (f *Format) DownloadSegments(outputDir string, goroutines int, onSegmentDownload func(segment *m3u8.MediaSegment, current, total int, file *os.File, err error) error) error {
|
||||
resp, err := f.crunchy.Client.Get(f.Video.URI)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
// reads the m3u8 file
|
||||
playlist, _, err := m3u8.DecodeFrom(resp.Body, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// extracts the segments from the playlist
|
||||
var segments []*m3u8.MediaSegment
|
||||
for _, segment := range playlist.(*m3u8.MediaPlaylist).Segments {
|
||||
// some segments are nil, so they have to be filtered out
|
||||
if segment != nil {
|
||||
segments = append(segments, segment)
|
||||
}
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
chunkSize := len(segments) / goroutines
|
||||
|
||||
// when a afterDownload call returns an error, this channel will be set to true and stop all goroutines
|
||||
quit := make(chan bool)
|
||||
|
||||
// receives the decrypt block and iv from the first segment.
|
||||
// in my tests, only the first segment has specified this data, so the decryption data from this first segments will be used in every other segment too
|
||||
block, iv, err := f.getCrypt(segments[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var current int32
|
||||
for i := 0; i < len(segments); i += chunkSize {
|
||||
wg.Add(1)
|
||||
end := i + chunkSize
|
||||
if end > len(segments) {
|
||||
end = len(segments)
|
||||
}
|
||||
i := i
|
||||
go func() {
|
||||
for j, segment := range segments[i:end] {
|
||||
select {
|
||||
case <-quit:
|
||||
break
|
||||
default:
|
||||
var file *os.File
|
||||
file, err = f.downloadSegment(segment, filepath.Join(outputDir, fmt.Sprintf("%d.ts", i+j)), block, iv)
|
||||
if err != nil {
|
||||
quit <- true
|
||||
break
|
||||
}
|
||||
if onSegmentDownload != nil {
|
||||
if err = onSegmentDownload(segment, int(atomic.AddInt32(¤t, 1)), len(segments), file, err); err != nil {
|
||||
quit <- true
|
||||
file.Close()
|
||||
break
|
||||
}
|
||||
}
|
||||
file.Close()
|
||||
}
|
||||
}
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
select {
|
||||
case <-quit:
|
||||
return err
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// getCrypt extracts the key and iv of a m3u8 segment and converts it into a cipher.Block block and a iv byte sequence
|
||||
func (f *Format) getCrypt(segment *m3u8.MediaSegment) (block cipher.Block, iv []byte, err error) {
|
||||
var resp *http.Response
|
||||
|
||||
resp, err = f.crunchy.Client.Get(segment.Key.URI)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
key, err := ioutil.ReadAll(resp.Body)
|
||||
|
||||
block, err = aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
iv = []byte(segment.Key.IV)
|
||||
if len(iv) == 0 {
|
||||
iv = key
|
||||
}
|
||||
|
||||
return block, iv, nil
|
||||
}
|
||||
|
||||
// downloadSegment downloads a segments, decrypts it and names it after the given index
|
||||
func (f *Format) downloadSegment(segment *m3u8.MediaSegment, filename string, block cipher.Block, iv []byte) (*os.File, error) {
|
||||
// every segment is aes-128 encrypted and has to be decrypted when downloaded
|
||||
content, err := decryptSegment(f.crunchy.Client, segment, block, iv)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// some mpeg stream things. see the link beneath for more information
|
||||
// https://github.com/oopsguy/m3u8/blob/4150e93ec8f4f8718875a02973f5d792648ecb97/dl/dowloader.go#L135
|
||||
/*syncByte := uint8(71) //0x47
|
||||
for k := 0; k < len(content); k++ {
|
||||
if content[k] == syncByte {
|
||||
content = content[k:]
|
||||
break
|
||||
}
|
||||
}*/
|
||||
|
||||
file, err := os.Create(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
if _, err = file.Write(content); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return file, nil
|
||||
}
|
||||
|
||||
// mergeSegments reads every file in tempPath and write their content to output
|
||||
func (f *Format) mergeSegments(tempPath string, output *os.File) error {
|
||||
dir, err := os.ReadDir(tempPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
writer := bufio.NewWriter(output)
|
||||
defer writer.Flush()
|
||||
|
||||
// sort the directory files after their numeric names
|
||||
sort.Slice(dir, func(i, j int) bool {
|
||||
iNum, err := strconv.Atoi(strings.Split(dir[i].Name(), ".")[0])
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
jNum, err := strconv.Atoi(strings.Split(dir[j].Name(), ".")[0])
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return iNum < jNum
|
||||
})
|
||||
|
||||
for _, file := range dir {
|
||||
bodyAsBytes, err := ioutil.ReadFile(filepath.Join(tempPath, file.Name()))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = writer.Write(bodyAsBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
8
go.mod
8
go.mod
|
|
@ -1,8 +0,0 @@
|
|||
module github.com/ByteDream/crunchyroll-go
|
||||
|
||||
go 1.16
|
||||
|
||||
require (
|
||||
github.com/grafov/m3u8 v0.11.1
|
||||
github.com/spf13/cobra v1.2.1
|
||||
)
|
||||
568
go.sum
568
go.sum
|
|
@ -1,568 +0,0 @@
|
|||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
|
||||
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
|
||||
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
|
||||
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
|
||||
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
|
||||
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
|
||||
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
|
||||
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
|
||||
cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
|
||||
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
|
||||
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
|
||||
cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
|
||||
cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
|
||||
cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
|
||||
cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
|
||||
cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
|
||||
cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
|
||||
cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
|
||||
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
|
||||
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
|
||||
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
|
||||
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
|
||||
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
|
||||
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
|
||||
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
|
||||
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
|
||||
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
|
||||
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
|
||||
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
|
||||
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
|
||||
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
|
||||
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
|
||||
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
|
||||
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
|
||||
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
|
||||
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
|
||||
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
|
||||
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
|
||||
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
|
||||
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
|
||||
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
|
||||
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
|
||||
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
|
||||
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
|
||||
github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
|
||||
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
|
||||
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
|
||||
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
|
||||
github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
|
||||
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
|
||||
github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
|
||||
github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
|
||||
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
|
||||
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
|
||||
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
|
||||
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
|
||||
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
|
||||
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
|
||||
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
|
||||
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
|
||||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
|
||||
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
|
||||
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
|
||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/grafov/m3u8 v0.11.1 h1:igZ7EBIB2IAsPPazKwRKdbhxcoBKO3lO1UY57PZDeNA=
|
||||
github.com/grafov/m3u8 v0.11.1/go.mod h1:nqzOkfBiZJENr52zTVd/Dcl03yzphIMbJqkXGu+u080=
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
|
||||
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
|
||||
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
|
||||
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
|
||||
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
|
||||
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
|
||||
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
|
||||
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
|
||||
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
|
||||
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
|
||||
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
|
||||
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
|
||||
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
|
||||
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
|
||||
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
|
||||
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
|
||||
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
|
||||
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
|
||||
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
|
||||
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
|
||||
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
|
||||
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||
github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw=
|
||||
github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk=
|
||||
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
||||
go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ=
|
||||
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
|
||||
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
|
||||
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
|
||||
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
|
||||
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
|
||||
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
||||
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
|
||||
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
|
||||
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
|
||||
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
|
||||
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
|
||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
|
||||
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
|
||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
|
||||
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
|
||||
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
|
||||
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
|
||||
golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
||||
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
||||
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
||||
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
|
||||
google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
|
||||
google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
|
||||
google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
|
||||
google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
|
||||
google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
|
||||
google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
|
||||
google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=
|
||||
google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=
|
||||
google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
|
||||
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
|
||||
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
|
||||
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
|
||||
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
|
||||
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
|
||||
google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
|
||||
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
|
||||
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
|
||||
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||
google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
|
||||
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
|
||||
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
|
||||
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||
google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
|
||||
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
|
||||
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
|
||||
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
|
||||
|
|
@ -1,99 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// MovieListing contains the metadata of a single movie listing as returned
// by the Crunchyroll beta cms api.
type MovieListing struct {
	// crunchy is the client instance this listing was fetched with; it is
	// reused for follow-up requests (streams, audio locale).
	crunchy *Crunchyroll

	// ID is the api id of the movie listing.
	ID string `json:"id"`

	Title       string `json:"title"`
	Slug        string `json:"slug"`
	SlugTitle   string `json:"slug_title"`
	Description string `json:"description"`

	// Images holds the thumbnail artwork in multiple resolutions.
	// The outer slice groups image sets; the inner slice lists the
	// individual sizes of one set.
	Images struct {
		Thumbnail [][]struct {
			Width  int    `json:"width"`
			Height int    `json:"height"`
			Type   string `json:"type"`
			Source string `json:"source"`
		} `json:"thumbnail"`
	} `json:"images"`

	// DurationMS is the runtime of the movie in milliseconds.
	DurationMS       int    `json:"duration_ms"`
	IsPremiumOnly    bool   `json:"is_premium_only"`
	ListeningID      string `json:"listening_id"`
	IsMature         bool   `json:"is_mature"`
	AvailableOffline bool   `json:"available_offline"`
	IsSubbed         bool   `json:"is_subbed"`
	IsDubbed         bool   `json:"is_dubbed"`

	Playback          string `json:"playback"`
	AvailabilityNotes string `json:"availability_notes"`
}
|
||||
|
||||
// MovieListingFromID returns a movie listing by its api id
|
||||
func MovieListingFromID(crunchy *Crunchyroll, id string) (*MovieListing, error) {
|
||||
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movie_listing/%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
crunchy.Config.CountryCode,
|
||||
crunchy.Config.MaturityRating,
|
||||
crunchy.Config.Channel,
|
||||
id,
|
||||
crunchy.Locale,
|
||||
crunchy.Config.Signature,
|
||||
crunchy.Config.Policy,
|
||||
crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
movieListing := &MovieListing{
|
||||
crunchy: crunchy,
|
||||
}
|
||||
if err = decodeMapToStruct(jsonBody, movieListing); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return movieListing, nil
|
||||
}
|
||||
|
||||
// AudioLocale is same as Episode.AudioLocale
|
||||
func (ml *MovieListing) AudioLocale() (LOCALE, error) {
|
||||
resp, err := ml.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
ml.crunchy.Config.CountryCode,
|
||||
ml.crunchy.Config.MaturityRating,
|
||||
ml.crunchy.Config.Channel,
|
||||
ml.ID,
|
||||
ml.crunchy.Locale,
|
||||
ml.crunchy.Config.Signature,
|
||||
ml.crunchy.Config.Policy,
|
||||
ml.crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
return LOCALE(jsonBody["audio_locale"].(string)), nil
|
||||
}
|
||||
|
||||
// Streams returns all streams which are available for the movie listing
|
||||
func (ml *MovieListing) Streams() ([]*Stream, error) {
|
||||
return fromVideoStreams(ml.crunchy, fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
ml.crunchy.Config.CountryCode,
|
||||
ml.crunchy.Config.MaturityRating,
|
||||
ml.crunchy.Config.Channel,
|
||||
ml.ID,
|
||||
ml.crunchy.Locale,
|
||||
ml.crunchy.Config.Signature,
|
||||
ml.crunchy.Config.Policy,
|
||||
ml.crunchy.Config.KeyPairID))
|
||||
}
|
||||
95
season.go
95
season.go
|
|
@ -1,95 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
type Season struct {
|
||||
crunchy *Crunchyroll
|
||||
|
||||
ID string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
SlugTitle string `json:"slug_title"`
|
||||
SeriesID string `json:"series_id"`
|
||||
SeasonNumber int `json:"season_number"`
|
||||
IsComplete bool `json:"is_complete"`
|
||||
Description string `json:"description"`
|
||||
Keywords []string `json:"keywords"`
|
||||
SeasonTags []string `json:"season_tags"`
|
||||
IsMature bool `json:"is_mature"`
|
||||
MatureBlocked bool `json:"mature_blocked"`
|
||||
IsSubbed bool `json:"is_subbed"`
|
||||
IsDubbed bool `json:"is_dubbed"`
|
||||
IsSimulcast bool `json:"is_simulcast"`
|
||||
SeoTitle string `json:"seo_title"`
|
||||
SeoDescription string `json:"seo_description"`
|
||||
|
||||
Language LOCALE
|
||||
}
|
||||
|
||||
// SeasonFromID returns a season by its api id
|
||||
func SeasonFromID(crunchy *Crunchyroll, id string) (*Season, error) {
|
||||
resp, err := crunchy.Client.Get(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/seasons/%s?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
crunchy.Config.CountryCode,
|
||||
crunchy.Config.MaturityRating,
|
||||
crunchy.Config.Channel,
|
||||
id,
|
||||
crunchy.Locale,
|
||||
crunchy.Config.Signature,
|
||||
crunchy.Config.Policy,
|
||||
crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
season := &Season{
|
||||
crunchy: crunchy,
|
||||
}
|
||||
if err := decodeMapToStruct(jsonBody, season); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return season, nil
|
||||
}
|
||||
|
||||
// Episodes returns all episodes which are available for the season
|
||||
func (s *Season) Episodes() (episodes []*Episode, err error) {
|
||||
resp, err := s.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/episodes?season_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
s.crunchy.Config.CountryCode,
|
||||
s.crunchy.Config.MaturityRating,
|
||||
s.crunchy.Config.Channel,
|
||||
s.ID,
|
||||
s.crunchy.Locale,
|
||||
s.crunchy.Config.Signature,
|
||||
s.crunchy.Config.Policy,
|
||||
s.crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
for _, item := range jsonBody["items"].([]interface{}) {
|
||||
episode := &Episode{
|
||||
crunchy: s.crunchy,
|
||||
}
|
||||
if err = decodeMapToStruct(item, episode); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if episode.Playback != "" {
|
||||
streamHref := item.(map[string]interface{})["__links__"].(map[string]interface{})["streams"].(map[string]interface{})["href"].(string)
|
||||
if match := regexp.MustCompile(`(?m)^/cms/v2/\S+videos/(\w+)/streams$`).FindAllStringSubmatch(streamHref, -1); len(match) > 0 {
|
||||
episode.StreamID = match[0][1]
|
||||
}
|
||||
}
|
||||
episodes = append(episodes, episode)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
12
src/main.rs
Normal file
12
src/main.rs
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
#[cfg(not(any(
|
||||
feature = "rustls-tls",
|
||||
feature = "native-tls",
|
||||
feature = "openssl-tls",
|
||||
feature = "openssl-tls-static"
|
||||
)))]
|
||||
compile_error!("At least one tls feature must be activated");
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
crunchy_cli_core::main(&std::env::args().collect::<Vec<String>>()).await
|
||||
}
|
||||
116
stream.go
116
stream.go
|
|
@ -1,116 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/grafov/m3u8"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
type Stream struct {
|
||||
crunchy *Crunchyroll
|
||||
|
||||
HardsubLocale LOCALE
|
||||
AudioLocale LOCALE
|
||||
Subtitles []*Subtitle
|
||||
|
||||
formatType FormatType
|
||||
id string
|
||||
streamURL string
|
||||
}
|
||||
|
||||
// StreamsFromID returns a stream by its api id
|
||||
func StreamsFromID(crunchy *Crunchyroll, id string) ([]*Stream, error) {
|
||||
return fromVideoStreams(crunchy, fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/videos/%s/streams?locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
crunchy.Config.CountryCode,
|
||||
crunchy.Config.MaturityRating,
|
||||
crunchy.Config.Channel,
|
||||
id,
|
||||
crunchy.Locale,
|
||||
crunchy.Config.Signature,
|
||||
crunchy.Config.Policy,
|
||||
crunchy.Config.KeyPairID))
|
||||
}
|
||||
|
||||
// Formats returns all formats which are available for the stream
|
||||
func (s *Stream) Formats() ([]*Format, error) {
|
||||
resp, err := s.crunchy.Client.Get(s.streamURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
playlist, _, err := m3u8.DecodeFrom(resp.Body, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var formats []*Format
|
||||
for _, variant := range playlist.(*m3u8.MasterPlaylist).Variants {
|
||||
formats = append(formats, &Format{
|
||||
crunchy: s.crunchy,
|
||||
ID: s.id,
|
||||
FormatType: s.formatType,
|
||||
Video: variant,
|
||||
AudioLocale: s.AudioLocale,
|
||||
Hardsub: s.HardsubLocale,
|
||||
Subtitles: s.Subtitles,
|
||||
})
|
||||
}
|
||||
return formats, nil
|
||||
}
|
||||
|
||||
// fromVideoStreams returns all streams which are accessible via the endpoint
|
||||
func fromVideoStreams(crunchy *Crunchyroll, endpoint string) (streams []*Stream, err error) {
|
||||
resp, err := crunchy.request(endpoint)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
if len(jsonBody) == 0 {
|
||||
// this may get thrown when the crunchyroll account has just a normal account and not one with premium
|
||||
return nil, errors.New("no stream available")
|
||||
}
|
||||
|
||||
audioLocale := jsonBody["audio_locale"].(string)
|
||||
|
||||
var subtitles []*Subtitle
|
||||
for _, rawSubtitle := range jsonBody["subtitles"].(map[string]interface{}) {
|
||||
subtitle := &Subtitle{
|
||||
crunchy: crunchy,
|
||||
}
|
||||
decodeMapToStruct(rawSubtitle.(map[string]interface{}), subtitle)
|
||||
subtitles = append(subtitles, subtitle)
|
||||
}
|
||||
|
||||
for _, streamData := range jsonBody["streams"].(map[string]interface{})["adaptive_hls"].(map[string]interface{}) {
|
||||
streamData := streamData.(map[string]interface{})
|
||||
|
||||
hardsubLocale := streamData["hardsub_locale"].(string)
|
||||
|
||||
var id string
|
||||
var formatType FormatType
|
||||
href := jsonBody["__links__"].(map[string]interface{})["resource"].(map[string]interface{})["href"].(string)
|
||||
if match := regexp.MustCompile(`(?sm)^/cms/v2/\S+/crunchyroll/(\w+)/(\w+)$`).FindAllStringSubmatch(href, -1); len(match) > 0 {
|
||||
formatType = FormatType(match[0][1])
|
||||
id = match[0][2]
|
||||
}
|
||||
|
||||
stream := &Stream{
|
||||
crunchy: crunchy,
|
||||
HardsubLocale: LOCALE(hardsubLocale),
|
||||
formatType: formatType,
|
||||
id: id,
|
||||
streamURL: streamData["url"].(string),
|
||||
AudioLocale: LOCALE(audioLocale),
|
||||
Subtitles: subtitles,
|
||||
}
|
||||
|
||||
streams = append(streams, stream)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
25
subtitle.go
25
subtitle.go
|
|
@ -1,25 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
type Subtitle struct {
|
||||
crunchy *Crunchyroll
|
||||
|
||||
Locale LOCALE `json:"locale"`
|
||||
URL string `json:"url"`
|
||||
Format string `json:"format"`
|
||||
}
|
||||
|
||||
func (s Subtitle) Download(file *os.File) error {
|
||||
resp, err := s.crunchy.Client.Get(s.URL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
_, err = io.Copy(file, resp.Body)
|
||||
return err
|
||||
}
|
||||
57
utils.go
57
utils.go
|
|
@ -1,57 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"crypto/cipher"
|
||||
"encoding/json"
|
||||
"github.com/grafov/m3u8"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func decodeMapToStruct(m interface{}, s interface{}) error {
|
||||
jsonBody, err := json.Marshal(m)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return json.Unmarshal(jsonBody, s)
|
||||
}
|
||||
|
||||
// https://github.com/oopsguy/m3u8/blob/4150e93ec8f4f8718875a02973f5d792648ecb97/tool/crypt.go#L25
|
||||
func decryptSegment(client *http.Client, segment *m3u8.MediaSegment, block cipher.Block, iv []byte) ([]byte, error) {
|
||||
resp, err := client.Get(segment.URI)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
raw, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
blockMode := cipher.NewCBCDecrypter(block, iv[:block.BlockSize()])
|
||||
decrypted := make([]byte, len(raw))
|
||||
blockMode.CryptBlocks(decrypted, raw)
|
||||
raw = pkcs5UnPadding(decrypted)
|
||||
|
||||
return raw, nil
|
||||
}
|
||||
|
||||
// https://github.com/oopsguy/m3u8/blob/4150e93ec8f4f8718875a02973f5d792648ecb97/tool/crypt.go#L47
|
||||
func pkcs5UnPadding(origData []byte) []byte {
|
||||
length := len(origData)
|
||||
unPadding := int(origData[length-1])
|
||||
return origData[:(length - unPadding)]
|
||||
}
|
||||
|
||||
func regexGroups(parsed [][]string, subexpNames ...string) map[string]string {
|
||||
groups := map[string]string{}
|
||||
for _, match := range parsed {
|
||||
for i, content := range match {
|
||||
if subexpName := subexpNames[i]; subexpName != "" {
|
||||
groups[subexpName] = content
|
||||
}
|
||||
}
|
||||
}
|
||||
return groups
|
||||
}
|
||||
|
|
@ -1,70 +0,0 @@
|
|||
package utils
|
||||
|
||||
import (
|
||||
"github.com/ByteDream/crunchyroll-go"
|
||||
)
|
||||
|
||||
var AllLocales = []crunchyroll.LOCALE{
|
||||
crunchyroll.JP,
|
||||
crunchyroll.US,
|
||||
crunchyroll.LA,
|
||||
crunchyroll.ES,
|
||||
crunchyroll.FR,
|
||||
crunchyroll.BR,
|
||||
crunchyroll.IT,
|
||||
crunchyroll.DE,
|
||||
crunchyroll.RU,
|
||||
crunchyroll.ME,
|
||||
}
|
||||
|
||||
// ValidateLocale validates if the given locale actually exist
|
||||
func ValidateLocale(locale crunchyroll.LOCALE) bool {
|
||||
for _, l := range AllLocales {
|
||||
if l == locale {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// LocaleLanguage returns the country by its locale
|
||||
func LocaleLanguage(locale crunchyroll.LOCALE) string {
|
||||
switch locale {
|
||||
case crunchyroll.JP:
|
||||
return "Japanese"
|
||||
case crunchyroll.US:
|
||||
return "English (US)"
|
||||
case crunchyroll.LA:
|
||||
return "Spanish (Latin America)"
|
||||
case crunchyroll.ES:
|
||||
return "Spanish (Spain)"
|
||||
case crunchyroll.FR:
|
||||
return "French"
|
||||
case crunchyroll.BR:
|
||||
return "Portuguese (Brazil)"
|
||||
case crunchyroll.IT:
|
||||
return "Italian"
|
||||
case crunchyroll.DE:
|
||||
return "German"
|
||||
case crunchyroll.RU:
|
||||
return "Russian"
|
||||
case crunchyroll.ME:
|
||||
return "Arabic"
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
// SubtitleByLocale returns the subtitle of a crunchyroll.Format by its locale.
|
||||
// Check the second ok return value if the format has this subtitle
|
||||
func SubtitleByLocale(format *crunchyroll.Format, locale crunchyroll.LOCALE) (subtitle *crunchyroll.Subtitle, ok bool) {
|
||||
if format.Subtitles == nil {
|
||||
return
|
||||
}
|
||||
for _, sub := range format.Subtitles {
|
||||
if sub.Locale == locale {
|
||||
return sub, true
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
package utils
|
||||
|
||||
import (
|
||||
"github.com/ByteDream/crunchyroll-go"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// MovieListingsByDuration sorts movie listings by their duration
|
||||
type MovieListingsByDuration []*crunchyroll.MovieListing
|
||||
|
||||
func (mlbd MovieListingsByDuration) Len() int {
|
||||
return len(mlbd)
|
||||
}
|
||||
func (mlbd MovieListingsByDuration) Swap(i, j int) {
|
||||
mlbd[i], mlbd[j] = mlbd[j], mlbd[i]
|
||||
}
|
||||
func (mlbd MovieListingsByDuration) Less(i, j int) bool {
|
||||
return mlbd[i].DurationMS < mlbd[j].DurationMS
|
||||
}
|
||||
|
||||
// EpisodesByDuration episodes by their duration
|
||||
type EpisodesByDuration []*crunchyroll.Episode
|
||||
|
||||
func (ebd EpisodesByDuration) Len() int {
|
||||
return len(ebd)
|
||||
}
|
||||
func (ebd EpisodesByDuration) Swap(i, j int) {
|
||||
ebd[i], ebd[j] = ebd[j], ebd[i]
|
||||
}
|
||||
func (ebd EpisodesByDuration) Less(i, j int) bool {
|
||||
return ebd[i].DurationMS < ebd[j].DurationMS
|
||||
}
|
||||
|
||||
// FormatsByResolution sort formats after their resolution
|
||||
type FormatsByResolution []*crunchyroll.Format
|
||||
|
||||
func (fbr FormatsByResolution) Len() int {
|
||||
return len(fbr)
|
||||
}
|
||||
func (fbr FormatsByResolution) Swap(i, j int) {
|
||||
fbr[i], fbr[j] = fbr[j], fbr[i]
|
||||
}
|
||||
func (fbr FormatsByResolution) Less(i, j int) bool {
|
||||
iSplitRes := strings.Split(fbr[i].Video.Resolution, "x")
|
||||
iResX, _ := strconv.Atoi(iSplitRes[0])
|
||||
iResY, _ := strconv.Atoi(iSplitRes[1])
|
||||
|
||||
jSplitRes := strings.Split(fbr[j].Video.Resolution, "x")
|
||||
jResX, _ := strconv.Atoi(jSplitRes[0])
|
||||
jResY, _ := strconv.Atoi(jSplitRes[1])
|
||||
|
||||
return iResX+iResY < jResX+jResY
|
||||
}
|
||||
|
|
@ -1,626 +0,0 @@
|
|||
package utils
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/ByteDream/crunchyroll-go"
|
||||
"sort"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// FormatStructure is the basic structure which every other structure implements.
|
||||
// With it, and all other structures the api usage can be simplified magnificent
|
||||
type FormatStructure struct {
|
||||
// initState is true if every format, stream, ... in the structure tree is initialized
|
||||
initState bool
|
||||
|
||||
// getFunc specified the function which will be called if crunchyroll.Format is empty / not initialized yet.
|
||||
// It returns the formats itself, the parent streams (might be nil) and an error if one occurs
|
||||
getFunc func() ([]*crunchyroll.Format, []*crunchyroll.Stream, error)
|
||||
// formats holds all formats which were given
|
||||
formats []*crunchyroll.Format
|
||||
// parents holds all parents which were given
|
||||
parents []*crunchyroll.Stream
|
||||
}
|
||||
|
||||
func newFormatStructure(parentStructure *StreamStructure) *FormatStructure {
|
||||
return &FormatStructure{
|
||||
getFunc: func() (formats []*crunchyroll.Format, parents []*crunchyroll.Stream, err error) {
|
||||
streams, err := parentStructure.Streams()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
var lock sync.Mutex
|
||||
|
||||
for _, stream := range streams {
|
||||
wg.Add(1)
|
||||
stream := stream
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
f, err := stream.Formats()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
for _, format := range f {
|
||||
formats = append(formats, format)
|
||||
parents = append(parents, stream)
|
||||
}
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
return
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// NewFormatStructure returns a new FormatStructure, based on the given formats
|
||||
func NewFormatStructure(formats []*crunchyroll.Format) *FormatStructure {
|
||||
return &FormatStructure{
|
||||
getFunc: func() ([]*crunchyroll.Format, []*crunchyroll.Stream, error) {
|
||||
return formats, nil, nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Formats returns all stored formats
|
||||
func (fs *FormatStructure) Formats() ([]*crunchyroll.Format, error) {
|
||||
var err error
|
||||
if fs.formats == nil {
|
||||
if fs.formats, fs.parents, err = fs.getFunc(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fs.initState = true
|
||||
}
|
||||
return fs.formats, nil
|
||||
}
|
||||
|
||||
// FormatParent returns the parent stream of a format (if present).
|
||||
// If the format or parent is not stored, an error will be returned
|
||||
func (fs *FormatStructure) FormatParent(format *crunchyroll.Format) (*crunchyroll.Stream, error) {
|
||||
formats, err := fs.Formats()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if fs.parents == nil {
|
||||
return nil, errors.New("no parents are given")
|
||||
}
|
||||
|
||||
for i, f := range formats {
|
||||
if f == format {
|
||||
return fs.parents[i], nil
|
||||
}
|
||||
}
|
||||
return nil, errors.New("given format could not be found")
|
||||
}
|
||||
|
||||
// InitAll recursive requests all given information.
|
||||
// All functions of FormatStructure or other structs in this file which are executed after this have a much lesser chance to return any error,
|
||||
// so the error return value of these functions can be pretty safely ignored.
|
||||
// This function should only be called if you need to the access to any function of FormatStructure which returns a crunchyroll.Format (or an array of it).
|
||||
// Re-calling this method can lead to heavy problems (believe me, it caused a simple bug and i've tried to fix it for several hours).
|
||||
// Check FormatStructure.InitAllState if you can call this method without causing bugs
|
||||
func (fs *FormatStructure) InitAll() error {
|
||||
var err error
|
||||
if fs.formats, fs.parents, err = fs.getFunc(); err != nil {
|
||||
return err
|
||||
}
|
||||
fs.initState = true
|
||||
return nil
|
||||
}
|
||||
|
||||
// InitAllState returns FormatStructure.InitAll or FormatStructure.Formats was called.
|
||||
// If so, all errors which are returned by functions of structs in this file can be safely ignored
|
||||
func (fs *FormatStructure) InitAllState() bool {
|
||||
return fs.initState
|
||||
}
|
||||
|
||||
// AvailableLocales returns all available audio, subtitle and hardsub locales for all formats.
|
||||
// If includeEmpty is given, locales with no value are included too
|
||||
func (fs *FormatStructure) AvailableLocales(includeEmpty bool) (audioLocales []crunchyroll.LOCALE, subtitleLocales []crunchyroll.LOCALE, hardsubLocales []crunchyroll.LOCALE, err error) {
|
||||
var formats []*crunchyroll.Format
|
||||
if formats, err = fs.Formats(); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
audioMap := map[crunchyroll.LOCALE]interface{}{}
|
||||
subtitleMap := map[crunchyroll.LOCALE]interface{}{}
|
||||
hardsubMap := map[crunchyroll.LOCALE]interface{}{}
|
||||
for _, format := range formats {
|
||||
// audio locale should always have a valid locale
|
||||
if includeEmpty || !includeEmpty && format.AudioLocale != "" {
|
||||
audioMap[format.AudioLocale] = nil
|
||||
}
|
||||
if format.Subtitles != nil {
|
||||
for _, subtitle := range format.Subtitles {
|
||||
if subtitle.Locale == "" && !includeEmpty {
|
||||
continue
|
||||
}
|
||||
subtitleMap[subtitle.Locale] = nil
|
||||
}
|
||||
}
|
||||
if includeEmpty || !includeEmpty && format.Hardsub != "" {
|
||||
hardsubMap[format.Hardsub] = nil
|
||||
}
|
||||
}
|
||||
|
||||
for k := range audioMap {
|
||||
audioLocales = append(audioLocales, k)
|
||||
}
|
||||
for k := range subtitleMap {
|
||||
subtitleLocales = append(subtitleLocales, k)
|
||||
}
|
||||
for k := range hardsubMap {
|
||||
hardsubLocales = append(hardsubLocales, k)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// FilterFormatsByAudio returns all formats which have the given locale as their audio locale
|
||||
func (fs *FormatStructure) FilterFormatsByAudio(locale crunchyroll.LOCALE) (f []*crunchyroll.Format, err error) {
|
||||
var formats []*crunchyroll.Format
|
||||
if formats, err = fs.Formats(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, format := range formats {
|
||||
if format.AudioLocale == locale {
|
||||
f = append(f, format)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// FilterFormatsBySubtitle returns all formats which have the given locale as their subtitle locale.
|
||||
// Hardsub indicates if the subtitle should be shown on the video itself
|
||||
func (fs *FormatStructure) FilterFormatsBySubtitle(locale crunchyroll.LOCALE, hardsub bool) (f []*crunchyroll.Format, err error) {
|
||||
var formats []*crunchyroll.Format
|
||||
if formats, err = fs.Formats(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, format := range formats {
|
||||
if hardsub && format.Hardsub == locale {
|
||||
f = append(f, format)
|
||||
} else if !hardsub && format.Hardsub == "" {
|
||||
f = append(f, format)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// FilterFormatsByLocales returns all formats which have the given locales as their property.
|
||||
// Hardsub is the same as in FormatStructure.FilterFormatsBySubtitle
|
||||
func (fs *FormatStructure) FilterFormatsByLocales(audioLocale, subtitleLocale crunchyroll.LOCALE, hardsub bool) ([]*crunchyroll.Format, error) {
|
||||
var f []*crunchyroll.Format
|
||||
|
||||
formats, err := fs.Formats()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, format := range formats {
|
||||
if format.AudioLocale == audioLocale {
|
||||
if hardsub && format.Hardsub == subtitleLocale {
|
||||
f = append(f, format)
|
||||
} else if !hardsub && format.Hardsub == "" {
|
||||
f = append(f, format)
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(f) == 0 {
|
||||
return nil, errors.New("could not find any matching format")
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// OrderFormatsByID loops through all stored formats and returns a 2d slice
|
||||
// where a row represents an id and the column all formats which have this id
|
||||
func (fs *FormatStructure) OrderFormatsByID() ([][]*crunchyroll.Format, error) {
|
||||
formats, err := fs.Formats()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
formatsMap := map[string][]*crunchyroll.Format{}
|
||||
for _, format := range formats {
|
||||
if _, ok := formatsMap[format.ID]; !ok {
|
||||
formatsMap[format.ID] = make([]*crunchyroll.Format, 0)
|
||||
}
|
||||
formatsMap[format.ID] = append(formatsMap[format.ID], format)
|
||||
}
|
||||
|
||||
var orderedFormats [][]*crunchyroll.Format
|
||||
for _, v := range formatsMap {
|
||||
var f []*crunchyroll.Format
|
||||
for _, format := range v {
|
||||
f = append(f, format)
|
||||
}
|
||||
orderedFormats = append(orderedFormats, f)
|
||||
}
|
||||
return orderedFormats, nil
|
||||
}
|
||||
|
||||
// StreamStructure fields are nearly same as FormatStructure
|
||||
type StreamStructure struct {
|
||||
*FormatStructure
|
||||
|
||||
getFunc func() ([]*crunchyroll.Stream, []crunchyroll.Video, error)
|
||||
streams []*crunchyroll.Stream
|
||||
parents []crunchyroll.Video
|
||||
}
|
||||
|
||||
func newStreamStructure(structure VideoStructure) *StreamStructure {
|
||||
var getFunc func() (streams []*crunchyroll.Stream, parents []crunchyroll.Video, err error)
|
||||
switch structure.(type) {
|
||||
case *EpisodeStructure:
|
||||
episodeStructure := structure.(*EpisodeStructure)
|
||||
getFunc = func() (streams []*crunchyroll.Stream, parents []crunchyroll.Video, err error) {
|
||||
episodes, err := episodeStructure.Episodes()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
var lock sync.Mutex
|
||||
|
||||
for _, episode := range episodes {
|
||||
wg.Add(1)
|
||||
episode := episode
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
s, err := episode.Streams()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
for _, stream := range s {
|
||||
streams = append(streams, stream)
|
||||
parents = append(parents, episode)
|
||||
}
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
return
|
||||
}
|
||||
case *MovieListingStructure:
|
||||
movieListingStructure := structure.(*MovieListingStructure)
|
||||
getFunc = func() (streams []*crunchyroll.Stream, parents []crunchyroll.Video, err error) {
|
||||
movieListings, err := movieListingStructure.MovieListings()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
var lock sync.Mutex
|
||||
|
||||
for _, movieListing := range movieListings {
|
||||
wg.Add(1)
|
||||
movieListing := movieListing
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
s, err := movieListing.Streams()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
for _, stream := range s {
|
||||
streams = append(streams, stream)
|
||||
parents = append(parents, movieListing)
|
||||
}
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
ss := &StreamStructure{
|
||||
getFunc: getFunc,
|
||||
}
|
||||
ss.FormatStructure = newFormatStructure(ss)
|
||||
return ss
|
||||
}
|
||||
|
||||
// NewStreamStructure returns a new StreamStructure, based on the given formats
|
||||
func NewStreamStructure(streams []*crunchyroll.Stream) *StreamStructure {
|
||||
ss := &StreamStructure{
|
||||
getFunc: func() ([]*crunchyroll.Stream, []crunchyroll.Video, error) {
|
||||
return streams, nil, nil
|
||||
},
|
||||
}
|
||||
ss.FormatStructure = newFormatStructure(ss)
|
||||
return ss
|
||||
}
|
||||
|
||||
// Streams returns all stored streams
|
||||
func (ss *StreamStructure) Streams() ([]*crunchyroll.Stream, error) {
|
||||
if ss.streams == nil {
|
||||
var err error
|
||||
if ss.streams, ss.parents, err = ss.getFunc(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ss.streams, nil
|
||||
}
|
||||
|
||||
// StreamParent returns the parent video (type crunchyroll.Series or crunchyroll.Movie) of a stream (if present).
|
||||
// If the stream or parent is not stored, an error will be returned
|
||||
func (ss *StreamStructure) StreamParent(stream *crunchyroll.Stream) (crunchyroll.Video, error) {
|
||||
streams, err := ss.Streams()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if ss.parents == nil {
|
||||
return nil, errors.New("no parents are given")
|
||||
}
|
||||
|
||||
for i, s := range streams {
|
||||
if s == stream {
|
||||
return ss.parents[i], nil
|
||||
}
|
||||
}
|
||||
return nil, errors.New("given stream could not be found")
|
||||
}
|
||||
|
||||
// VideoStructure is an interface which is implemented by EpisodeStructure and MovieListingStructure
|
||||
type VideoStructure interface{}
|
||||
|
||||
// EpisodeStructure fields are nearly same as FormatStructure
|
||||
type EpisodeStructure struct {
|
||||
VideoStructure
|
||||
*StreamStructure
|
||||
|
||||
getFunc func() ([]*crunchyroll.Episode, []*crunchyroll.Season, error)
|
||||
episodes []*crunchyroll.Episode
|
||||
parents []*crunchyroll.Season
|
||||
}
|
||||
|
||||
func newEpisodeStructure(structure *SeasonStructure) *EpisodeStructure {
|
||||
es := &EpisodeStructure{
|
||||
getFunc: func() (episodes []*crunchyroll.Episode, parents []*crunchyroll.Season, err error) {
|
||||
seasons, err := structure.Seasons()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
var lock sync.Mutex
|
||||
|
||||
for _, season := range seasons {
|
||||
wg.Add(1)
|
||||
season := season
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
e, err := season.Episodes()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
for _, episode := range e {
|
||||
episodes = append(episodes, episode)
|
||||
parents = append(parents, season)
|
||||
}
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
return
|
||||
},
|
||||
}
|
||||
es.StreamStructure = newStreamStructure(es)
|
||||
return es
|
||||
}
|
||||
|
||||
// NewEpisodeStructure returns a new EpisodeStructure, based on the given formats
|
||||
func NewEpisodeStructure(episodes []*crunchyroll.Episode) *EpisodeStructure {
|
||||
es := &EpisodeStructure{
|
||||
getFunc: func() ([]*crunchyroll.Episode, []*crunchyroll.Season, error) {
|
||||
return episodes, nil, nil
|
||||
},
|
||||
}
|
||||
es.StreamStructure = newStreamStructure(es)
|
||||
return es
|
||||
}
|
||||
|
||||
// Episodes returns all stored episodes
|
||||
func (es *EpisodeStructure) Episodes() ([]*crunchyroll.Episode, error) {
|
||||
if es.episodes == nil {
|
||||
var err error
|
||||
if es.episodes, es.parents, err = es.getFunc(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return es.episodes, nil
|
||||
}
|
||||
|
||||
// EpisodeParent returns the parent season of a stream (if present).
|
||||
// If the stream or parent is not stored, an error will be returned
|
||||
func (es *EpisodeStructure) EpisodeParent(episode *crunchyroll.Episode) (*crunchyroll.Season, error) {
|
||||
episodes, err := es.Episodes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if es.parents == nil {
|
||||
return nil, errors.New("no parents are given")
|
||||
}
|
||||
|
||||
for i, e := range episodes {
|
||||
if e == episode {
|
||||
return es.parents[i], nil
|
||||
}
|
||||
}
|
||||
return nil, errors.New("given episode could not be found")
|
||||
}
|
||||
|
||||
// GetEpisodeByFormat returns the episode to which the given format belongs to.
|
||||
// If the format or the parent is not stored, an error will be returned
|
||||
func (es *EpisodeStructure) GetEpisodeByFormat(format *crunchyroll.Format) (*crunchyroll.Episode, error) {
|
||||
if !es.initState {
|
||||
if err := es.InitAll(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
formatParent, err := es.FormatParent(format)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
streamParent, err := es.StreamParent(formatParent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
episode, ok := streamParent.(*crunchyroll.Episode)
|
||||
if !ok {
|
||||
return nil, errors.New("could not find parent episode")
|
||||
}
|
||||
return episode, nil
|
||||
}
|
||||
|
||||
func (es *EpisodeStructure) OrderEpisodeByID() ([][]*crunchyroll.Episode, error) {
|
||||
episodes, err := es.Episodes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
episodesMap := map[string][]*crunchyroll.Episode{}
|
||||
for _, episode := range episodes {
|
||||
if _, ok := episodesMap[episode.ID]; !ok {
|
||||
episodesMap[episode.ID] = make([]*crunchyroll.Episode, 0)
|
||||
}
|
||||
episodesMap[episode.ID] = append(episodesMap[episode.ID], episode)
|
||||
}
|
||||
|
||||
var orderedEpisodes [][]*crunchyroll.Episode
|
||||
for _, v := range episodesMap {
|
||||
orderedEpisodes = append(orderedEpisodes, v)
|
||||
}
|
||||
return orderedEpisodes, nil
|
||||
}
|
||||
|
||||
func (es *EpisodeStructure) OrderFormatsByEpisodeNumber() ([][]*crunchyroll.Format, error) {
|
||||
formats, err := es.Formats()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
formatsMap := map[int][]*crunchyroll.Format{}
|
||||
for _, format := range formats {
|
||||
stream, err := es.FormatParent(format)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
video, err := es.StreamParent(stream)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
episode, ok := video.(*crunchyroll.Episode)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if _, ok := formatsMap[episode.EpisodeNumber]; !ok {
|
||||
formatsMap[episode.EpisodeNumber] = make([]*crunchyroll.Format, 0)
|
||||
}
|
||||
formatsMap[episode.EpisodeNumber] = append(formatsMap[episode.EpisodeNumber], format)
|
||||
}
|
||||
|
||||
keys := make([]int, 0, len(formatsMap))
|
||||
for k := range formatsMap {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Ints(keys)
|
||||
|
||||
var orderedFormats [][]*crunchyroll.Format
|
||||
for _, k := range keys {
|
||||
orderedFormats = append(orderedFormats, formatsMap[k])
|
||||
}
|
||||
return orderedFormats, nil
|
||||
}
|
||||
|
||||
// SeasonStructure bundles seasons together with the structures of their
// child elements (episodes, streams, formats).
// Fields are nearly same as FormatStructure.
type SeasonStructure struct {
	// EpisodeStructure provides access to the episodes (and, through it,
	// the streams and formats) belonging to the stored seasons.
	*EpisodeStructure

	// getFunc lazily yields the seasons on first access.
	// NOTE(review): NewSeasonStructure assigns the seasons field directly
	// and leaves getFunc nil — confirm callers never pass nil seasons.
	getFunc func() ([]*crunchyroll.Season, error)
	// seasons caches the stored / fetched seasons.
	seasons []*crunchyroll.Season
}
|
||||
|
||||
// NewSeasonStructure returns a new SeasonStructure, based on the given formats
|
||||
func NewSeasonStructure(seasons []*crunchyroll.Season) *SeasonStructure {
|
||||
ss := &SeasonStructure{
|
||||
seasons: seasons,
|
||||
}
|
||||
ss.EpisodeStructure = newEpisodeStructure(ss)
|
||||
return ss
|
||||
}
|
||||
|
||||
// Seasons returns all stored seasons
|
||||
func (ss *SeasonStructure) Seasons() ([]*crunchyroll.Season, error) {
|
||||
if ss.seasons == nil {
|
||||
var err error
|
||||
if ss.seasons, err = ss.getFunc(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ss.seasons, nil
|
||||
}
|
||||
|
||||
// MovieListingStructure bundles movie listings together with the
// structures of their child elements (streams, formats).
// Fields are nearly same as FormatStructure.
type MovieListingStructure struct {
	VideoStructure
	// StreamStructure provides access to the streams (and formats)
	// belonging to the stored movie listings.
	*StreamStructure

	// getFunc lazily yields the movie listings on first access.
	getFunc func() ([]*crunchyroll.MovieListing, error)
	// movieListings caches the result of getFunc.
	movieListings []*crunchyroll.MovieListing
}
|
||||
|
||||
// NewMovieListingStructure returns a new MovieListingStructure, based on the given formats
|
||||
func NewMovieListingStructure(movieListings []*crunchyroll.MovieListing) *MovieListingStructure {
|
||||
ml := &MovieListingStructure{
|
||||
getFunc: func() ([]*crunchyroll.MovieListing, error) {
|
||||
return movieListings, nil
|
||||
},
|
||||
}
|
||||
ml.StreamStructure = newStreamStructure(ml)
|
||||
return ml
|
||||
}
|
||||
|
||||
// MovieListings returns all stored movie listings
|
||||
func (ml *MovieListingStructure) MovieListings() ([]*crunchyroll.MovieListing, error) {
|
||||
if ml.movieListings == nil {
|
||||
var err error
|
||||
if ml.movieListings, err = ml.getFunc(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ml.movieListings, nil
|
||||
}
|
||||
|
||||
// GetMovieListingByFormat returns the movie listing to which the given format belongs to.
|
||||
// If the format or the parent is not stored, an error will be returned
|
||||
func (ml *MovieListingStructure) GetMovieListingByFormat(format *crunchyroll.Format) (*crunchyroll.MovieListing, error) {
|
||||
if !ml.initState {
|
||||
if err := ml.InitAll(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
formatParent, err := ml.FormatParent(format)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
streamParent, err := ml.StreamParent(formatParent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
movieListing, ok := streamParent.(*crunchyroll.MovieListing)
|
||||
if !ok {
|
||||
return nil, errors.New("could not find parent movie listing")
|
||||
}
|
||||
return movieListing, nil
|
||||
}
|
||||
208
video.go
208
video.go
|
|
@ -1,208 +0,0 @@
|
|||
package crunchyroll
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// video holds the metadata fields shared by the different media types;
// Movie and Series embed it. All fields are decoded from the api json.
type video struct {
	ID         string `json:"id"`
	ExternalID string `json:"external_id"`

	Description string `json:"description"`
	Title       string `json:"title"`
	Slug        string `json:"slug"`
	SlugTitle   string `json:"slug_title"`

	// Images holds the tall and wide poster variants; each inner slice
	// entry describes one rendition with its dimensions and source url.
	Images struct {
		PosterTall [][]struct {
			Height int    `json:"height"`
			Source string `json:"source"`
			Type   string `json:"type"`
			Width  int    `json:"width"`
		} `json:"poster_tall"`
		PosterWide [][]struct {
			Height int    `json:"height"`
			Source string `json:"source"`
			Type   string `json:"type"`
			Width  int    `json:"width"`
		} `json:"poster_wide"`
	} `json:"images"`
}
|
||||
|
||||
type Video interface{}
|
||||
|
||||
// Movie represents a movie as returned by the api, embedding the shared
// video metadata.
type Movie struct {
	video
	Video

	// crunchy is the client used for follow-up api requests (MovieListing).
	crunchy *Crunchyroll

	// not generated when calling MovieFromID
	MovieListingMetadata struct {
		AvailabilityNotes   string   `json:"availability_notes"`
		AvailableOffline    bool     `json:"available_offline"`
		DurationMS          int      `json:"duration_ms"`
		ExtendedDescription string   `json:"extended_description"`
		FirstMovieID        string   `json:"first_movie_id"`
		IsDubbed            bool     `json:"is_dubbed"`
		IsMature            bool     `json:"is_mature"`
		IsPremiumOnly       bool     `json:"is_premium_only"`
		IsSubbed            bool     `json:"is_subbed"`
		MatureRatings       []string `json:"mature_ratings"`
		MovieReleaseYear    int      `json:"movie_release_year"`
		SubtitleLocales     []LOCALE `json:"subtitle_locales"`
	} `json:"movie_listing_metadata"`

	Playback string `json:"playback"`

	PromoDescription string `json:"promo_description"`
	PromoTitle       string `json:"promo_title"`
	// SearchMetadata carries the relevance score; presumably only
	// populated on search results — TODO confirm against callers.
	SearchMetadata struct {
		Score float64 `json:"score"`
	}
}
|
||||
|
||||
// MovieFromID returns a movie by its api id
|
||||
func MovieFromID(crunchy *Crunchyroll, id string) (*Movie, error) {
|
||||
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movies/%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
crunchy.Config.CountryCode,
|
||||
crunchy.Config.MaturityRating,
|
||||
crunchy.Config.Channel,
|
||||
id,
|
||||
crunchy.Locale,
|
||||
crunchy.Config.Signature,
|
||||
crunchy.Config.Policy,
|
||||
crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
movieListing := &Movie{
|
||||
crunchy: crunchy,
|
||||
}
|
||||
if err = decodeMapToStruct(jsonBody, movieListing); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return movieListing, nil
|
||||
}
|
||||
|
||||
// MovieListing returns all videos corresponding with the movie.
|
||||
// Beside the normal movie, sometimes movie previews are returned too, but you can try to get the actual movie
|
||||
// by sorting the returning MovieListing slice with the utils.MovieListingByDuration interface
|
||||
func (m *Movie) MovieListing() (movieListings []*MovieListing, err error) {
|
||||
resp, err := m.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movies?movie_listing_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
m.crunchy.Config.CountryCode,
|
||||
m.crunchy.Config.MaturityRating,
|
||||
m.crunchy.Config.Channel,
|
||||
m.ID,
|
||||
m.crunchy.Locale,
|
||||
m.crunchy.Config.Signature,
|
||||
m.crunchy.Config.Policy,
|
||||
m.crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
for _, item := range jsonBody["items"].([]interface{}) {
|
||||
movieListing := &MovieListing{
|
||||
crunchy: m.crunchy,
|
||||
}
|
||||
if err = decodeMapToStruct(item, movieListing); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
movieListings = append(movieListings, movieListing)
|
||||
}
|
||||
return movieListings, nil
|
||||
}
|
||||
|
||||
// Series represents a series as returned by the api, embedding the
// shared video metadata.
type Series struct {
	video
	Video

	// crunchy is the client used for follow-up api requests (Seasons).
	crunchy *Crunchyroll

	PromoDescription string `json:"promo_description"`
	PromoTitle       string `json:"promo_title"`

	AvailabilityNotes   string   `json:"availability_notes"`
	EpisodeCount        int      `json:"episode_count"`
	ExtendedDescription string   `json:"extended_description"`
	IsDubbed            bool     `json:"is_dubbed"`
	IsMature            bool     `json:"is_mature"`
	IsSimulcast         bool     `json:"is_simulcast"`
	IsSubbed            bool     `json:"is_subbed"`
	MatureBlocked       bool     `json:"mature_blocked"`
	MatureRatings       []string `json:"mature_ratings"`
	SeasonCount         int      `json:"season_count"`

	// not generated when calling SeriesFromID
	SearchMetadata struct {
		Score float64 `json:"score"`
	}
}
|
||||
|
||||
// SeriesFromID returns a series by its api id
|
||||
func SeriesFromID(crunchy *Crunchyroll, id string) (*Series, error) {
|
||||
resp, err := crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/movies?movie_listing_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
crunchy.Config.CountryCode,
|
||||
crunchy.Config.MaturityRating,
|
||||
crunchy.Config.Channel,
|
||||
id,
|
||||
crunchy.Locale,
|
||||
crunchy.Config.Signature,
|
||||
crunchy.Config.Policy,
|
||||
crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
series := &Series{
|
||||
crunchy: crunchy,
|
||||
}
|
||||
if err = decodeMapToStruct(jsonBody, series); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return series, nil
|
||||
}
|
||||
|
||||
// Seasons returns all seasons of a series
|
||||
func (s *Series) Seasons() (seasons []*Season, err error) {
|
||||
resp, err := s.crunchy.request(fmt.Sprintf("https://beta-api.crunchyroll.com/cms/v2/%s/%s/%s/seasons?series_id=%s&locale=%s&Signature=%s&Policy=%s&Key-Pair-Id=%s",
|
||||
s.crunchy.Config.CountryCode,
|
||||
s.crunchy.Config.MaturityRating,
|
||||
s.crunchy.Config.Channel,
|
||||
s.ID,
|
||||
s.crunchy.Locale,
|
||||
s.crunchy.Config.Signature,
|
||||
s.crunchy.Config.Policy,
|
||||
s.crunchy.Config.KeyPairID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var jsonBody map[string]interface{}
|
||||
json.NewDecoder(resp.Body).Decode(&jsonBody)
|
||||
|
||||
for _, item := range jsonBody["items"].([]interface{}) {
|
||||
season := &Season{
|
||||
crunchy: s.crunchy,
|
||||
}
|
||||
if err = decodeMapToStruct(item, season); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
seasons = append(seasons, season)
|
||||
}
|
||||
return
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue