Mirror of https://github.com/OpenListTeam/OpenList.git (synced 2025-11-25 03:15:19 +08:00)
Compare commits
2175 Commits
[Commit list: 2175 rows with columns Author, SHA1, and Date; only the abbreviated SHA1 values (from 7f53390dce through 3ce94de823) are populated in this extraction.]
.air.toml (new file, 44 lines)
@@ -0,0 +1,44 @@
root = "."
testdata_dir = "testdata"
tmp_dir = "tmp"

[build]
args_bin = ["server"]
bin = "./tmp/main"
cmd = "go build -o ./tmp/main ."
delay = 0
exclude_dir = ["assets", "tmp", "vendor", "testdata"]
exclude_file = []
exclude_regex = ["_test.go"]
exclude_unchanged = false
follow_symlink = false
full_bin = ""
include_dir = []
include_ext = ["go", "tpl", "tmpl", "html"]
include_file = []
kill_delay = "0s"
log = "build-errors.log"
poll = false
poll_interval = 0
rerun = false
rerun_delay = 500
send_interrupt = false
stop_on_error = false

[color]
app = ""
build = "yellow"
main = "magenta"
runner = "green"
watcher = "cyan"

[log]
main_only = false
time = false

[misc]
clean_on_exit = false

[screen]
clear_on_rebuild = false
keep_scroll = true
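The `.air.toml` above configures Air, a live-reload tool for Go: whenever a watched `.go`, `.tpl`, `.tmpl`, or `.html` file changes, it rebuilds the project with `go build -o ./tmp/main .` and restarts the binary as `./tmp/main server` (because of `args_bin = ["server"]`). As a minimal hypothetical sketch of the kind of entrypoint that invocation assumes, here is a `main` package that dispatches on a `server` argument; the port, route, and command handling are illustrative placeholders, not taken from the diff.

```go
// Hypothetical sketch only: an entrypoint shaped like the one .air.toml
// assumes, where Air runs `./tmp/main server` after each rebuild.
// The real OpenList CLI (subcommands, port, router) may be structured differently.
package main

import (
	"fmt"
	"log"
	"net/http"
	"os"
)

func main() {
	// Air passes the arguments from args_bin, so os.Args[1] is "server".
	if len(os.Args) < 2 || os.Args[1] != "server" {
		fmt.Println("usage: main server")
		return
	}

	// Placeholder handler; the actual project registers its own routes.
	http.HandleFunc("/ping", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "pong")
	})

	log.Println("listening on :5244") // port chosen for illustration
	log.Fatal(http.ListenAndServe(":5244", nil))
}
```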
.github/FUNDING.yml (new file, vendored, 13 lines)
@@ -0,0 +1,13 @@
# These are supported funding model platforms

github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: []
.github/ISSUE_TEMPLATE/00-bug_report_zh.yml (new file, vendored, 81 lines)
@@ -0,0 +1,81 @@
name: "错误报告"
description: 错误报告 / 问题
title: "[BUG] 请修改标题为您遇到的问题"
labels: [bug]
body:
  - type: markdown
    attributes:
      value: |
        感谢您花时间填写此错误报告。
        请**务必**确认您的问题无重复,且不是因为您的操作、网络或第三方软件问题。

  - type: checkboxes
    attributes:
      label: 请确认以下事项
      description: |
        您必须勾选以下内容,否则您的问题可能会被直接关闭。
        或者您可以去[讨论区](https://github.com/OpenListTeam/OpenList/discussions)。
      options:
        - label: |
            我已确认阅读并同意 [AGPL-3.0 第15条](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=15.%20Disclaimer%20of%20Warranty.) 。
            本程序不提供任何明示或暗示的担保,使用风险由您自行承担。
        - label: |
            我已确认阅读并同意 [AGPL-3.0 第16条](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=16.%20Limitation%20of%20Liability.) 。
            无论何种情况,版权持有人或其他分发者均不对使用本程序所造成的任何损失承担责任。
        - label: |
            我确认我的描述清晰,语法礼貌,能帮助开发者快速定位问题,并符合社区规则。
        - label: |
            我已确认阅读了[OpenList文档](https://docs.oplist.org)。
        - label: |
            我已确认没有重复的问题或讨论。
        - label: |
            我已确认是`OpenList`的问题,而不是其他原因(例如 [网络](https://docs.oplist.org/zh/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host) ,`依赖`或`操作`)。
        - label: |
            我认为此问题必须由`OpenList`处理,而非第三方。
        - label: |
            我已确认这个问题在最新版本中没有被修复。

  - type: input
    id: version
    attributes:
      label: OpenList 版本(必填)
      description: |
        您使用的是哪个版本的软件?请不要使用`latest`或`master`作为答案。
      placeholder: v4.xx.xx
    validations:
      required: true
  - type: input
    id: driver
    attributes:
      label: 使用的存储驱动(必填)
      description: |
        您使用的是哪个存储驱动?
      placeholder: "例如: OneDrive"
    validations:
      required: true
  - type: textarea
    id: bug-description
    attributes:
      label: 问题描述(必填)
    validations:
      required: true
  - type: textarea
    id: config
    attributes:
      label: 配置文件内容(必填)
      description: |
        请提供您的`OpenList`应用的配置文件,并截图相关存储配置。(可隐藏隐私字段)
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: 日志(可选)
      description: |
        请复制粘贴错误日志,或者截图。(可隐藏隐私字段)
  - type: textarea
    id: reproduction
    attributes:
      label: 复现链接(可选)
      description: |
        请提供能复现此问题的链接。
.github/ISSUE_TEMPLATE/01-bug_report_en.yml (new file, vendored, 81 lines)
@@ -0,0 +1,81 @@
name: "Bug Report"
description: Bug Report / Issue
title: "[BUG] Please modify the title to describe the issue you are facing"
labels: [bug]
body:
  - type: markdown
    attributes:
      value: |
        Thank you for taking the time to fill out this bug report.
        Please **make sure** your issue is not a duplicate and is not caused by your own operation, network, or third-party software.

  - type: checkboxes
    attributes:
      label: Please confirm the following
      description: |
        You must check all the following, otherwise your issue may be closed directly.
        Or you can go to the [discussions](https://github.com/OpenListTeam/OpenList/discussions).
      options:
        - label: |
            I have read and agree to [AGPL-3.0 Section 15](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=15.%20Disclaimer%20of%20Warranty.) .
            The program is provided "as is" without any warranties; you bear all risks of using it.
        - label: |
            I have read and agree to [AGPL-3.0 Section 16](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=16.%20Limitation%20of%20Liability.) .
            The copyright holders and distributors are not liable for any damages resulting from the use or inability to use the program.
        - label: |
            I confirm my description is clear, polite, helps developers quickly locate the issue, and complies with community rules.
        - label: |
            I have read the [OpenList documentation](https://docs.oplist.org).
        - label: |
            I confirm there are no duplicate issues or discussions.
        - label: |
            I confirm this is an `OpenList` issue, not caused by other reasons (such as [network](https://docs.oplist.org/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host), dependencies, or operation).
        - label: |
            I believe this issue must be handled by `OpenList` and not by a third party.
        - label: |
            I confirm this issue is not fixed in the latest version.

  - type: input
    id: version
    attributes:
      label: OpenList Version (required)
      description: |
        What version of the software are you using? Please do not use `latest` or `master` as the answer.
      placeholder: v4.xx.xx
    validations:
      required: true
  - type: input
    id: driver
    attributes:
      label: Storage Driver Used (required)
      description: |
        Which storage driver are you using?
      placeholder: "e.g. OneDrive"
    validations:
      required: true
  - type: textarea
    id: bug-description
    attributes:
      label: Bug Description (required)
    validations:
      required: true
  - type: textarea
    id: config
    attributes:
      label: Configuration File Content (required)
      description: |
        Please provide your `OpenList` application's configuration file and a screenshot of the relevant storage configuration. (You may mask sensitive fields)
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Logs (optional)
      description: |
        Please copy and paste any relevant log output or screenshots. (You may mask sensitive fields)
  - type: textarea
    id: reproduction
    attributes:
      label: Reproduction Link (optional)
      description: |
        Please provide a link to a repo or page that can reproduce this issue.
.github/ISSUE_TEMPLATE/02-feature_request_zh.yml (new file, vendored, 48 lines)
@@ -0,0 +1,48 @@
name: "功能请求"
description: 功能请求 / 增强
title: "[Feature] 请修改标题为您的功能名称"
labels: [enhancement]
body:
  - type: checkboxes
    attributes:
      label: 请确认以下事项
      description: |
        您必须勾选以下内容,否则您的问题可能会被直接关闭。
        或者您可以去[讨论区](https://github.com/OpenListTeam/OpenList/discussions)。
      options:
        - label: |
            我已确认阅读并同意 [AGPL-3.0 第15条](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=15.%20Disclaimer%20of%20Warranty.) 。
            本程序不提供任何明示或暗示的担保,使用风险由您自行承担。
        - label: |
            我已确认阅读并同意 [AGPL-3.0 第16条](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=16.%20Limitation%20of%20Liability.) 。
            无论何种情况,版权持有人或其他分发者均不对使用本程序所造成的任何损失承担责任。
        - label: |
            我确认我的描述清晰,语法礼貌,能帮助开发者快速定位问题,并符合社区规则。
        - label: |
            我已确认阅读了[OpenList文档](https://docs.oplist.org)。
        - label: |
            我已确认没有重复的问题或讨论。
        - label: |
            我认为此问题必须由`OpenList`处理,而非第三方。
        - label: |
            我已确认此功能尚未被实现。
        - label: |
            我已确认此功能是合理的,且有普遍需求,并非我个人需要。
  - type: textarea
    id: feature-description
    attributes:
      label: 需求描述
    validations:
      required: true
  - type: textarea
    id: suggested-solution
    attributes:
      label: 实现思路
      description: |
        实现此需求的解决思路。
  - type: textarea
    id: additional-context
    attributes:
      label: 附加信息
      description: |
        相关的任何其他上下文或截图,或者你觉得有帮助的信息
.github/ISSUE_TEMPLATE/03-feature_request_en.yml (new file, vendored, 48 lines)
@@ -0,0 +1,48 @@
name: "Feature Request"
description: Feature Request / Enhancement
title: "[Feature] Please change the title to your feature name"
labels: [enhancement]
body:
  - type: checkboxes
    attributes:
      label: Please confirm the following
      description: |
        You must check all the following, otherwise your request may be closed directly.
        Or you can go to the [discussions](https://github.com/OpenListTeam/OpenList/discussions).
      options:
        - label: |
            I have read and agree to [AGPL-3.0 Section 15](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=15.%20Disclaimer%20of%20Warranty.).
            The program is provided "as is" without any warranties; you bear all risks of using it.
        - label: |
            I have read and agree to [AGPL-3.0 Section 16](https://www.gnu.org/licenses/agpl-3.0.txt#:~:text=16.%20Limitation%20of%20Liability.).
            The copyright holders and distributors are not liable for any damages resulting from the use or inability to use the program.
        - label: |
            I confirm my description is clear, polite, helps developers quickly locate the issue, and complies with community rules.
        - label: |
            I have read the [OpenList documentation](https://docs.oplist.org).
        - label: |
            I confirm there are no duplicate issues or discussions.
        - label: |
            I believe this issue must be handled by `OpenList` and not by a third party.
        - label: |
            I confirm this feature has not been implemented yet.
        - label: |
            I confirm this feature is reasonable and has general demand, not just my personal need.
  - type: textarea
    id: feature-description
    attributes:
      label: Feature Description
    validations:
      required: true
  - type: textarea
    id: suggested-solution
    attributes:
      label: Suggested Solution
      description: |
        Solution or approach to achieve this feature.
  - type: textarea
    id: additional-context
    attributes:
      label: Additional Information
      description: |
        Any other context or screenshots related to this feature request, or information you find helpful.
.github/ISSUE_TEMPLATE/bug_report.yml (deleted, vendored, 39 lines)
@@ -1,39 +0,0 @@
name: "Bug report"
description: Bug report
labels: [pending triage]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: input
    id: version
    attributes:
      label: Alist Version / Alist 版本
      description: What version of our software are you running?
      placeholder: v2.0.0
    validations:
      required: true
  - type: textarea
    id: bug-description
    attributes:
      label: Describe the bug / 问题描述
    validations:
      required: true
  - type: textarea
    id: reproduction
    attributes:
      label: Reproduction / 复现链接
      description: |
        Please provide a link to a repo that can reproduce the problem you ran into.
        请提供能复现此问题的链接
    validations:
      required: false
  - type: textarea
    id: logs
    attributes:
      label: 日志 / Logs
      description: |
        Please copy and paste any relevant log output.
        请复制粘贴错误日志,或者截图
      render: shell
17 .github/ISSUE_TEMPLATE/config.yml (vendored)
@@ -1,5 +1,14 @@
blank_issues_enabled: false
blank_issues_enabled: true
contact_links:
  - name: Questions & Discussions & Feature request
    url: https://github.com/Xhofe/alist/discussions
    about: Use GitHub discussions for message-board style questions and discussions or feature request.
  - name: 问题和讨论
    url: https://github.com/OpenListTeam/OpenList/discussions
    about: 讨论、问题、想法等
  - name: Questions & Discussions
    url: https://github.com/OpenListTeam/OpenList/discussions
    about: Discuss issues, ideas, etc.
  - name: 即时聊天
    url: https://t.me/OpenListTeam
    about: 与我们聊天
  - name: Chat
    url: https://t.me/OpenListTeam
    about: Chat with us
21 .github/config.yml (vendored, new file)
@@ -0,0 +1,21 @@
# Configuration for welcome - https://github.com/behaviorbot/welcome

# Configuration for new-issue-welcome - https://github.com/behaviorbot/new-issue-welcome

# Comment to be posted to on first time issues
newIssueWelcomeComment: >
  Thanks for opening your first issue here! Be sure to follow the issue template!

# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome

# Comment to be posted to on PRs from first time contributors in your repository
newPRWelcomeComment: >
  Thanks for opening this pull request! Please check out our contributing guidelines.

# Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge

# Comment to be posted to on pull requests merged by a first time user
firstPRMergeComment: >
  Congrats on merging your first pull request! We here at behavior bot are proud of you!

# It is recommend to include as many gifs and emojis as possible
21 .github/stale.yml (vendored, new file)
@@ -0,0 +1,21 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 44
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 20
# Issues with these labels will never be considered stale
exemptLabels:
  - accepted
  - security
  - working
  - pr-welcome
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: >
  This issue was closed due to inactive more than 52 days. You can reopen or
  recreate it if you think it should continue. Thank you for your contributions again.
143 .github/workflows/beta_release.yml (vendored, new file)
@@ -0,0 +1,143 @@
|
||||
name: Beta Release builds
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
changelog:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest]
|
||||
go-version: ["1.21"]
|
||||
name: Beta Release Changelog
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Create or update ref
|
||||
id: create-or-update-ref
|
||||
uses: ovsds/create-or-update-ref-action@v1
|
||||
with:
|
||||
ref: tags/beta
|
||||
sha: ${{ github.sha }}
|
||||
|
||||
- name: Delete beta tag
|
||||
run: git tag -d beta
|
||||
continue-on-error: true
|
||||
|
||||
- name: changelog # or changelogithub@0.12 if ensure the stable result
|
||||
id: changelog
|
||||
run: |
|
||||
git tag -l
|
||||
npx changelogithub --output CHANGELOG.md
|
||||
|
||||
- name: Upload assets to beta release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
body_path: CHANGELOG.md
|
||||
files: CHANGELOG.md
|
||||
prerelease: true
|
||||
tag_name: beta
|
||||
|
||||
- name: Upload assets to github artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: beta changelog
|
||||
path: ${{ github.workspace }}/CHANGELOG.md
|
||||
compression-level: 0
|
||||
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
|
||||
|
||||
release:
|
||||
needs:
|
||||
- changelog
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- target: "!(*musl*|*windows-arm64*|*android*|*freebsd*)" # xgo
|
||||
hash: "md5"
|
||||
- target: "linux-!(arm*)-musl*" #musl-not-arm
|
||||
hash: "md5-linux-musl"
|
||||
- target: "linux-arm*-musl*" #musl-arm
|
||||
hash: "md5-linux-musl-arm"
|
||||
- target: "windows-arm64" #win-arm64
|
||||
hash: "md5-windows-arm64"
|
||||
- target: "android-*" #android
|
||||
hash: "md5-android"
|
||||
- target: "freebsd-*" #freebsd
|
||||
hash: "md5-freebsd"
|
||||
|
||||
name: Beta Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22"
|
||||
|
||||
- name: Setup web
|
||||
run: bash build.sh dev web
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build
|
||||
uses: OpenListTeam/cgo-actions@v1.1.2
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
musl-target-format: $os-$musl-$arch
|
||||
out-dir: build
|
||||
output: openlist-$target$ext
|
||||
musl-base-url: "https://github.com/OpenListTeam/musl-compilers/releases/latest/download/"
|
||||
x-flags: |
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.BuiltAt=$built_at
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.GitAuthor=OpenList
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.GitCommit=$git_commit
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.Version=$tag
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.WebVersion=dev
|
||||
|
||||
- name: Compress
|
||||
run: |
|
||||
bash build.sh zip ${{ matrix.hash }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# See above
|
||||
- name: Upload assets to beta release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
prerelease: true
|
||||
tag_name: beta
|
||||
|
||||
- name: Clean illegal characters from matrix.target
|
||||
id: clean_target_name
|
||||
run: |
|
||||
ILLEGAL_CHARS_REGEX='[":<>|*?\\/\r\n]'
|
||||
CLEANED_TARGET=$(echo "${{ matrix.target }}" | sed -E "s/$ILLEGAL_CHARS_REGEX//g")
|
||||
echo "Original target: ${{ matrix.target }}"
|
||||
echo "Cleaned target: $CLEANED_TARGET"
|
||||
echo "cleaned_target=$CLEANED_TARGET" >> $GITHUB_ENV
|
||||
|
||||
- name: Upload assets to github artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: beta builds for ${{ env.cleaned_target }}
|
||||
path: ${{ github.workspace }}/build/compress/*
|
||||
compression-level: 0
|
||||
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
|
||||
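The beta workflow above chains three build.sh calls around the cgo-actions cross-compile: fetch the dev web assets, build per-target binaries, then compress and hash them. A minimal local sketch of the same sequence, assuming build.sh accepts these subcommands exactly as invoked in the workflow (the cross-compile itself is normally handled by OpenListTeam/cgo-actions in CI, not by a local command):

```shell
# Local approximation of the beta pipeline above (sketch, not the CI itself).
export GITHUB_TOKEN=<token>   # build.sh uses it to download the dev frontend
bash build.sh dev web         # fetch the dev web assets, as in "Setup web"
# ...cross-compile step: performed by OpenListTeam/cgo-actions in CI,
#    which drops per-target binaries into ./build ...
bash build.sh zip md5         # compress binaries and emit md5 hash files
```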
80 .github/workflows/build.yml (vendored)
@@ -1,59 +1,63 @@
|
||||
name: build
|
||||
name: Test Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ v2 ]
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
branches: [ v2 ]
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest]
|
||||
go-version: [1.17]
|
||||
target:
|
||||
- darwin-amd64
|
||||
- darwin-arm64
|
||||
- windows-amd64
|
||||
- linux-arm64-musl
|
||||
- linux-amd64-musl
|
||||
- windows-arm64
|
||||
- android-arm64
|
||||
name: Build
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Set up Node
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
|
||||
# - name: Setup docker
|
||||
# uses: docker-practice/actions-setup-docker@master
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: v2
|
||||
path: alist
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Checkout web repo
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: Xhofe/alist-web
|
||||
ref: v2
|
||||
path: alist-web
|
||||
- uses: benjlevesque/short-sha@v3.0
|
||||
id: short-sha
|
||||
|
||||
- name: Set up xgo
|
||||
run: |
|
||||
docker pull techknowlogick/xgo:latest
|
||||
go install src.techknowlogick.com/xgo@latest
|
||||
sudo apt install upx
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22"
|
||||
|
||||
- name: Setup web
|
||||
run: bash build.sh dev web
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mv alist/build.sh .
|
||||
bash build.sh
|
||||
uses: OpenListTeam/cgo-actions@v1.1.2
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
musl-target-format: $os-$musl-$arch
|
||||
out-dir: build
|
||||
x-flags: |
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.BuiltAt=$built_at
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.GitAuthor=OpenList
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.GitCommit=$git_commit
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.Version=$tag
|
||||
github.com/OpenListTeam/OpenList/v4/internal/conf.WebVersion=dev
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifact
|
||||
path: alist/build
|
||||
name: openlist_${{ env.SHA }}_${{ matrix.target }}
|
||||
path: build/*
|
||||
|
||||
27 .github/workflows/changelog.yml (vendored, new file)
@@ -0,0 +1,27 @@
name: Automatic changelog

on:
  push:
    tags:
      - 'v*'

permissions:
  contents: write

jobs:
  changelog:
    name: Create Release
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Delete beta tag
        run: git tag -d beta
        continue-on-error: true

      - run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
||||
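The changelog job deletes the local beta tag before running changelogithub so the generated notes span from the previous stable tag to the one being released. A hedged local preview of the same step (the --output flag is the one used by the beta workflow earlier in this diff; the token value is a placeholder):

```shell
# Preview the release notes locally before tagging (sketch).
git fetch --tags
git tag -d beta || true                        # drop the moving beta tag, as in CI
GITHUB_TOKEN=<token> npx changelogithub --output CHANGELOG.md
cat CHANGELOG.md
```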
48 .github/workflows/docker.yml (vendored)
@@ -1,48 +0,0 @@
|
||||
name: docker
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'v2'
|
||||
tags:
|
||||
- 'v*'
|
||||
pull_request:
|
||||
branches:
|
||||
- 'v2'
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
name: Docker
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
with:
|
||||
images: xhofe/alist
|
||||
- name: Set up Node
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
- name: Build web
|
||||
run: bash build.sh web
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: xhofe
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
||||
145 .github/workflows/release.yml (vendored)
@@ -1,69 +1,132 @@
|
||||
name: release
|
||||
name: Release builds
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
release:
|
||||
types: [ published ]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
release:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest]
|
||||
go-version: [1.17]
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Free Disk Space (Ubuntu)
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
with:
|
||||
# this might remove tools that are actually needed,
|
||||
# if set to "true" but frees about 6 GB
|
||||
tool-cache: false
|
||||
|
||||
# all of these default to true, but feel free to set to
|
||||
# "false" if necessary for your workflow
|
||||
android: true
|
||||
dotnet: true
|
||||
haskell: true
|
||||
large-packages: true
|
||||
docker-images: true
|
||||
swap-storage: true
|
||||
|
||||
- name: Prerelease
|
||||
uses: irongut/EditRelease@v1.2.0
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
id: ${{ github.event.release.id }}
|
||||
prerelease: true
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v2
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
# - name: Setup docker
|
||||
# uses: docker-practice/actions-setup-docker@master
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: v2
|
||||
path: alist
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Checkout web repo
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
repository: Xhofe/alist-web
|
||||
ref: v2
|
||||
path: alist-web
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up xgo
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
docker pull techknowlogick/xgo:latest
|
||||
go install src.techknowlogick.com/xgo@latest
|
||||
sudo snap install zig --classic --beta
|
||||
docker pull crazymax/xgo:latest
|
||||
go install github.com/crazy-max/xgo@latest
|
||||
sudo apt install upx
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
mv alist/build.sh .
|
||||
bash build.sh release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload asserts files
|
||||
uses: ad-m/github-push-action@master
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
github_token: ${{ secrets.MY_TOKEN }}
|
||||
branch: cdn
|
||||
directory: alist-web
|
||||
repository: Xhofe/alist-web
|
||||
files: build/compress/*
|
||||
prerelease: false
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
release-lite:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release Lite
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Free Disk Space (Ubuntu)
|
||||
uses: jlumbroso/free-disk-space@main
|
||||
with:
|
||||
files: alist/build/compress/*
|
||||
# this might remove tools that are actually needed,
|
||||
# if set to "true" but frees about 6 GB
|
||||
tool-cache: false
|
||||
|
||||
# all of these default to true, but feel free to set to
|
||||
# "false" if necessary for your workflow
|
||||
android: true
|
||||
dotnet: true
|
||||
haskell: true
|
||||
large-packages: true
|
||||
docker-images: true
|
||||
swap-storage: true
|
||||
|
||||
- name: Prerelease
|
||||
uses: irongut/EditRelease@v1.2.0
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
id: ${{ github.event.release.id }}
|
||||
prerelease: true
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo snap install zig --classic --beta
|
||||
docker pull crazymax/xgo:latest
|
||||
go install github.com/crazy-max/xgo@latest
|
||||
sudo apt install upx
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release lite
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
prerelease: false
|
||||
|
||||
|
||||
69 .github/workflows/release_android.yml (vendored, new file)
@@ -0,0 +1,69 @@
|
||||
name: Release builds (Android)
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [ published ]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
release_android:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release android
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
|
||||
release_android_lite:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release lite android
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
294 .github/workflows/release_docker.yml (vendored, new file)
@@ -0,0 +1,294 @@
|
||||
name: Release builds (Docker)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
manual_tag:
|
||||
description: 'Tag name (like v0.1.0). Required if as_latest is true.'
|
||||
required: false
|
||||
type: string
|
||||
as_latest:
|
||||
description: 'Tag as latest?'
|
||||
required: true
|
||||
default: 'false'
|
||||
type: choice
|
||||
options:
|
||||
- 'true'
|
||||
- 'false'
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
DOCKERHUB_ORG_NAME: ${{ vars.DOCKERHUB_ORG_NAME || 'openlistteam' }}
|
||||
GHCR_ORG_NAME: ${{ vars.GHCR_ORG_NAME || 'openlistteam' }}
|
||||
IMAGE_NAME: openlist-git
|
||||
IMAGE_NAME_DOCKERHUB: openlist
|
||||
REGISTRY: ghcr.io
|
||||
ARTIFACT_NAME: 'binaries_docker_release'
|
||||
ARTIFACT_NAME_LITE: 'binaries_docker_release_lite'
|
||||
RELEASE_PLATFORMS: 'linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64'
|
||||
IMAGE_PUSH: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
|
||||
IMAGE_IS_PROD: ${{ github.ref_type == 'tag' || github.event.inputs.as_latest == 'true' }}
|
||||
IMAGE_TAGS_BETA: |
|
||||
type=raw,value=beta,enable={{is_default_branch}}
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
build_binary:
|
||||
name: Build Binaries for Docker Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 'stable'
|
||||
|
||||
- name: Cache Musl
|
||||
id: cache-musl
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: build/musl-libs
|
||||
key: docker-musl-libs-v2
|
||||
|
||||
- name: Download Musl Library
|
||||
if: steps.cache-musl.outputs.cache-hit != 'true'
|
||||
run: bash build.sh prepare docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build go binary (beta)
|
||||
if: env.IMAGE_IS_PROD != 'true'
|
||||
run: bash build.sh beta docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build go binary (release)
|
||||
if: env.IMAGE_IS_PROD == 'true'
|
||||
run: bash build.sh release docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ env.ARTIFACT_NAME }}
|
||||
overwrite: true
|
||||
path: |
|
||||
build/
|
||||
!build/*.tgz
|
||||
!build/musl-libs/**
|
||||
|
||||
build_binary_lite:
|
||||
name: Build Binaries for Docker Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 'stable'
|
||||
|
||||
- name: Cache Musl
|
||||
id: cache-musl
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: build/musl-libs
|
||||
key: docker-musl-libs-v2
|
||||
|
||||
- name: Download Musl Library
|
||||
if: steps.cache-musl.outputs.cache-hit != 'true'
|
||||
run: bash build.sh prepare lite docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build go binary (beta)
|
||||
if: env.IMAGE_IS_PROD != 'true'
|
||||
run: bash build.sh beta lite docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build go binary (release)
|
||||
if: env.IMAGE_IS_PROD == 'true'
|
||||
run: bash build.sh release lite docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ env.ARTIFACT_NAME_LITE }}
|
||||
overwrite: true
|
||||
path: |
|
||||
build/
|
||||
!build/*.tgz
|
||||
!build/musl-libs/**
|
||||
|
||||
release_docker:
|
||||
needs: build_binary
|
||||
name: Release Docker image
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
image: ["latest", "ffmpeg", "aria2", "aio"]
|
||||
include:
|
||||
- image: "latest"
|
||||
build_arg: ""
|
||||
tag_favor: ""
|
||||
- image: "ffmpeg"
|
||||
build_arg: INSTALL_FFMPEG=true
|
||||
tag_favor: "suffix=-ffmpeg,onlatest=true"
|
||||
- image: "aria2"
|
||||
build_arg: INSTALL_ARIA2=true
|
||||
tag_favor: "suffix=-aria2,onlatest=true"
|
||||
- image: "aio"
|
||||
build_arg: |
|
||||
INSTALL_FFMPEG=true
|
||||
INSTALL_ARIA2=true
|
||||
tag_favor: "suffix=-aio,onlatest=true"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: ${{ env.ARTIFACT_NAME }}
|
||||
path: 'build/'
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: env.IMAGE_PUSH == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to DockerHub Container Registry
|
||||
if: env.IMAGE_PUSH == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ env.DOCKERHUB_ORG_NAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.REGISTRY }}/${{ env.GHCR_ORG_NAME }}/${{ env.IMAGE_NAME }}
|
||||
${{ env.DOCKERHUB_ORG_NAME }}/${{ env.IMAGE_NAME_DOCKERHUB }}
|
||||
tags: >
|
||||
${{ env.IMAGE_IS_PROD == 'true' && (
|
||||
github.event_name == 'workflow_dispatch'
|
||||
&& format('type=raw,value={0}', github.event.inputs.manual_tag)
|
||||
|| format('type=raw,value={0}', github.ref_name)
|
||||
) || env.IMAGE_TAGS_BETA }}
|
||||
flavor: |
|
||||
latest=${{ env.IMAGE_IS_PROD }}
|
||||
${{ matrix.tag_favor }}
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile.ci
|
||||
push: ${{ env.IMAGE_PUSH == 'true' }}
|
||||
build-args: ${{ matrix.build_arg }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: ${{ env.RELEASE_PLATFORMS }}
|
||||
|
||||
release_docker_lite:
|
||||
needs: build_binary_lite
|
||||
name: Release Docker image
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
image: ["latest", "ffmpeg", "aria2", "aio"]
|
||||
include:
|
||||
- image: "latest"
|
||||
build_arg: ""
|
||||
tag_favor: "suffix=-lite,onlatest=true"
|
||||
- image: "ffmpeg"
|
||||
build_arg: INSTALL_FFMPEG=true
|
||||
tag_favor: "suffix=-lite-ffmpeg,onlatest=true"
|
||||
- image: "aria2"
|
||||
build_arg: INSTALL_ARIA2=true
|
||||
tag_favor: "suffix=-lite-aria2,onlatest=true"
|
||||
- image: "aio"
|
||||
build_arg: |
|
||||
INSTALL_FFMPEG=true
|
||||
INSTALL_ARIA2=true
|
||||
tag_favor: "suffix=-lite-aio,onlatest=true"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: ${{ env.ARTIFACT_NAME_LITE }}
|
||||
path: 'build/'
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: env.IMAGE_PUSH == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to DockerHub Container Registry
|
||||
if: env.IMAGE_PUSH == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ env.DOCKERHUB_ORG_NAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.REGISTRY }}/${{ env.GHCR_ORG_NAME }}/${{ env.IMAGE_NAME }}
|
||||
${{ env.DOCKERHUB_ORG_NAME }}/${{ env.IMAGE_NAME_DOCKERHUB }}
|
||||
tags: >
|
||||
${{ env.IMAGE_IS_PROD == 'true' && (
|
||||
github.event_name == 'workflow_dispatch'
|
||||
&& format('type=raw,value={0}', github.event.inputs.manual_tag)
|
||||
|| format('type=raw,value={0}', github.ref_name)
|
||||
) || env.IMAGE_TAGS_BETA }}
|
||||
flavor: |
|
||||
latest=${{ env.IMAGE_IS_PROD }}
|
||||
${{ matrix.tag_favor }}
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile.ci
|
||||
push: ${{ env.IMAGE_PUSH == 'true' }}
|
||||
build-args: ${{ matrix.build_arg }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: ${{ env.RELEASE_PLATFORMS }}
|
||||
69 .github/workflows/release_freebsd.yml (vendored, new file)
@@ -0,0 +1,69 @@
|
||||
name: Release builds (Freebsd)
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [ published ]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
release_freebsd:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release freebsd
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
|
||||
release_freebsd_lite:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release lite freebsd
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
69 .github/workflows/release_linux_musl.yml (vendored, new file)
@@ -0,0 +1,69 @@
|
||||
name: Release builds (linux_musl)
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [ published ]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
release_linux_musl:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release linux_musl
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
|
||||
release_linux_musl_lite:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release lite linux_musl
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
70 .github/workflows/release_linux_musl_arm.yml (vendored, new file)
@@ -0,0 +1,70 @@
|
||||
name: Release builds (linux_musl_arm)
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [ published ]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
release_linux_musl_arm:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release linux_musl_arm
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
|
||||
release_linux_musl_arm_lite:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.21' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release lite linux_musl_arm
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: build/compress/*
|
||||
|
||||
144 .github/workflows/test_docker.yml (vendored, new file)
@@ -0,0 +1,144 @@
|
||||
name: Docker Beta Release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
DOCKERHUB_ORG_NAME: ${{ vars.DOCKERHUB_ORG_NAME || 'openlistteam' }}
|
||||
GHCR_ORG_NAME: ${{ vars.GHCR_ORG_NAME || 'openlistteam' }}
|
||||
IMAGE_NAME: openlist-git
|
||||
IMAGE_NAME_DOCKERHUB: openlist
|
||||
REGISTRY: ghcr.io
|
||||
ARTIFACT_NAME: 'binaries_docker_release'
|
||||
ARTIFACT_NAME_LITE: 'binaries_docker_release_lite'
|
||||
RELEASE_PLATFORMS: 'linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64'
|
||||
IMAGE_PUSH: ${{ github.event_name == 'push' }}
|
||||
IMAGE_TAGS_BETA: |
|
||||
type=ref,event=pr
|
||||
type=raw,value=beta,enable={{is_default_branch}}
|
||||
|
||||
jobs:
|
||||
build_binary:
|
||||
name: Build Binaries for Docker Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 'stable'
|
||||
|
||||
- name: Cache Musl
|
||||
id: cache-musl
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: build/musl-libs
|
||||
key: docker-musl-libs-v2
|
||||
|
||||
- name: Download Musl Library
|
||||
if: steps.cache-musl.outputs.cache-hit != 'true'
|
||||
run: bash build.sh prepare docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build go binary (beta)
|
||||
run: bash build.sh beta docker-multiplatform
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ env.ARTIFACT_NAME }}
|
||||
overwrite: true
|
||||
path: |
|
||||
build/
|
||||
!build/*.tgz
|
||||
!build/musl-libs/**
|
||||
|
||||
release_docker:
|
||||
needs: build_binary
|
||||
name: Release Docker image
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
strategy:
|
||||
matrix:
|
||||
image: ["latest", "ffmpeg", "aria2", "aio"]
|
||||
include:
|
||||
- image: "latest"
|
||||
build_arg: ""
|
||||
tag_favor: ""
|
||||
- image: "ffmpeg"
|
||||
build_arg: INSTALL_FFMPEG=true
|
||||
tag_favor: "suffix=-ffmpeg,onlatest=true"
|
||||
- image: "aria2"
|
||||
build_arg: INSTALL_ARIA2=true
|
||||
tag_favor: "suffix=-aria2,onlatest=true"
|
||||
- image: "aio"
|
||||
build_arg: |
|
||||
INSTALL_FFMPEG=true
|
||||
INSTALL_ARIA2=true
|
||||
tag_favor: "suffix=-aio,onlatest=true"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: ${{ env.ARTIFACT_NAME }}
|
||||
path: 'build/'
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: env.IMAGE_PUSH == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to DockerHub Container Registry
|
||||
if: env.IMAGE_PUSH == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ env.DOCKERHUB_ORG_NAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.REGISTRY }}/${{ env.GHCR_ORG_NAME }}/${{ env.IMAGE_NAME }}
|
||||
${{ env.DOCKERHUB_ORG_NAME }}/${{ env.IMAGE_NAME_DOCKERHUB }}
|
||||
tags: ${{ env.IMAGE_TAGS_BETA }}
|
||||
flavor: |
|
||||
${{ matrix.tag_favor }}
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile.ci
|
||||
push: ${{ env.IMAGE_PUSH == 'true' }}
|
||||
build-args: ${{ matrix.build_arg }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: ${{ env.RELEASE_PLATFORMS }}
|
||||
40 .github/workflows/trigger-makefile-update.yml (vendored, new file)
@@ -0,0 +1,40 @@
|
||||
name: Trigger OpenWRT Update
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: 'Release tag to trigger update for'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
trigger-makefile-update:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Trigger Makefile hash update
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.EXTERNAL_REPO_TOKEN_LUCI_APP_OPENLIST }}
|
||||
repository: ${{ vars.HOOK_REPO || 'OpenListTeam/luci-app-openlist' }}
|
||||
event-type: update-hashes
|
||||
client-payload: |
|
||||
{
|
||||
"source_repository": "${{ github.repository }}",
|
||||
"release_tag": "${{ inputs.tag || github.ref_name }}",
|
||||
"release_name": "${{ inputs.tag || github.ref_name }}",
|
||||
"release_url": "${{ github.server_url }}/${{ github.repository }}/releases/tag/${{ inputs.tag || github.ref_name }}",
|
||||
"triggered_by": "${{ github.actor }}",
|
||||
"trigger_reason": "${{ github.event_name }}"
|
||||
}
|
||||
|
||||
- name: Log trigger information
|
||||
run: |
|
||||
echo "🚀 Successfully triggered Makefile hash update"
|
||||
echo "📦 Target repository: OpenListTeam/luci-app-openlist"
|
||||
echo "🏷️ Tag: ${{ inputs.tag || github.ref_name }}"
|
||||
echo "👤 Triggered by: ${{ github.actor }}"
|
||||
echo "📅 Trigger time: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
|
||||
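The trigger workflow above is a thin wrapper around GitHub's repository_dispatch API: peter-evans/repository-dispatch sends an `update-hashes` event with the release metadata as `client_payload`. The same event can be fired by hand with a plain REST call; the token and payload values below are illustrative placeholders:

```shell
# Manually fire the same repository_dispatch event (sketch).
curl -X POST \
  -H "Authorization: Bearer $EXTERNAL_REPO_TOKEN" \
  -H "Accept: application/vnd.github+json" \
  https://api.github.com/repos/OpenListTeam/luci-app-openlist/dispatches \
  -d '{
        "event_type": "update-hashes",
        "client_payload": {
          "source_repository": "OpenListTeam/OpenList",
          "release_tag": "v0.1.0"
        }
      }'
```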
18 .gitignore (vendored)
@@ -1,7 +1,7 @@
.idea/
.DS_Store
output/
dist/
/dist/

# Binaries for programs and plugins
*.exe
@@ -20,9 +20,15 @@ dist/

# Dependency directories (remove the comment below to include it)
# vendor/
bin/*
/alist
/bin/*
*.json
public/index.html
public/assets/
data/
/build
/data/
/tmp/
/log/
/lang/
/daemon/
/public/dist/*
/!public/dist/README.md

.VSCodeCounter
128 CODE_OF_CONDUCT.md (new file)
@@ -0,0 +1,128 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
[Telegram Group](https://t.me/OpenListTeam).
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series
|
||||
of actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or
|
||||
permanent ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within
|
||||
the community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available at
|
||||
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
||||
107 CONTRIBUTING.md (new file)
@@ -0,0 +1,107 @@
|
||||
# Contributing
|
||||
|
||||
## Setup your machine
|
||||
|
||||
`OpenList` is written in [Go](https://golang.org/) and [React](https://reactjs.org/).
|
||||
|
||||
Prerequisites:
|
||||
|
||||
- [git](https://git-scm.com)
|
||||
- [Go 1.20+](https://golang.org/doc/install)
|
||||
- [gcc](https://gcc.gnu.org/)
|
||||
- [nodejs](https://nodejs.org/)
|
||||
|
||||
Clone `OpenList` and `OpenList-Frontend` anywhere:
|
||||
|
||||
```shell
|
||||
$ git clone https://github.com/OpenListTeam/OpenList.git
|
||||
$ git clone --recurse-submodules https://github.com/OpenListTeam/OpenList-Frontend.git
|
||||
```
|
||||
You should switch to the `main` branch for development.
|
||||
|
||||
## Preview your change
|
||||
### backend
|
||||
```shell
|
||||
$ go run main.go
|
||||
```
|
||||
### frontend
|
||||
```shell
|
||||
$ pnpm dev
|
||||
```
|
||||
|
||||
## Add a new driver
|
||||
Copy `drivers/template` folder and rename it, and follow the comments in it.
|
||||
|
||||
## Create a commit
|
||||
|
||||
Commit messages should be well formatted, and to make that "standardized".
|
||||
|
||||
### Commit Message Format
|
||||
Each commit message consists of a **header**, a **body** and a **footer**. The header has a special
|
||||
format that includes a **type**, a **scope** and a **subject**:
|
||||
|
||||
```
|
||||
<type>(<scope>): <subject>
|
||||
<BLANK LINE>
|
||||
<body>
|
||||
<BLANK LINE>
|
||||
<footer>
|
||||
```
|
||||
|
||||
The **header** is mandatory and the **scope** of the header is optional.
|
||||
|
||||
Any line of the commit message cannot be longer than 100 characters! This allows the message to be easier
|
||||
to read on GitHub as well as in various git tools.
|
||||
|
||||
### Revert
|
||||
If the commit reverts a previous commit, it should begin with `revert: `, followed by the header
|
||||
of the reverted commit.
|
||||
In the body it should say: `This reverts commit <hash>.`, where the hash is the SHA of the commit
|
||||
being reverted.
|
||||
|
||||
### Type
|
||||
Must be one of the following:
|
||||
|
||||
* **feat**: A new feature
|
||||
* **fix**: A bug fix
|
||||
* **docs**: Documentation only changes
|
||||
* **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing
|
||||
semi-colons, etc)
|
||||
* **refactor**: A code change that neither fixes a bug nor adds a feature
|
||||
* **perf**: A code change that improves performance
|
||||
* **test**: Adding missing or correcting existing tests
|
||||
* **build**: Affects project builds or dependency modifications
|
||||
* **revert**: Restore the previous commit
|
||||
* **ci**: Continuous integration of related file modifications
|
||||
* **chore**: Changes to the build process or auxiliary tools and libraries such as documentation
|
||||
generation
|
||||
* **release**: Release a new version
|
||||
|
||||
### Scope
|
||||
The scope could be anything specifying place of the commit change. For example `$location`,
|
||||
`$browser`, `$compile`, `$rootScope`, `ngHref`, `ngClick`, `ngView`, etc...
|
||||
|
||||
You can use `*` when the change affects more than a single scope.
|
||||
|
||||
### Subject
|
||||
The subject contains succinct description of the change:
|
||||
|
||||
* use the imperative, present tense: "change" not "changed" nor "changes"
|
||||
* don't capitalize first letter
|
||||
* no dot (.) at the end
|
||||
|
||||
### Body
|
||||
Just as in the **subject**, use the imperative, present tense: "change" not "changed" nor "changes".
|
||||
The body should include the motivation for the change and contrast this with previous behavior.
|
||||
|
||||
### Footer
|
||||
The footer should contain any information about **Breaking Changes** and is also the place to
|
||||
[reference GitHub issues that this commit closes](https://help.github.com/articles/closing-issues-via-commit-messages/).
|
||||
|
||||
**Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines.
|
||||
The rest of the commit message is then used for this.
|
||||
|
||||
## Submit a pull request
|
||||
|
||||
Push your branch to your `openlist` fork and open a pull request against the
|
||||
`main` branch.
|
||||
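To make the commit format in CONTRIBUTING.md above concrete, here is a hypothetical commit that follows it (type `feat`, with an illustrative `ftp` scope and issue number); passing several -m flags to git produces the separate header, body and footer paragraphs:

```shell
# Hypothetical commit following <type>(<scope>): <subject> + body + footer.
git commit \
  -m "feat(ftp): add resumable uploads" \
  -m "Resume uploads from the last transferred offset instead of restarting, reducing retransfers on unstable links." \
  -m "Closes #123"
```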
46 Dockerfile
@@ -1,14 +1,42 @@
|
||||
FROM alpine:edge as builder
|
||||
FROM docker.io/library/alpine:edge AS builder
|
||||
LABEL stage=go-builder
|
||||
WORKDIR /app/
|
||||
RUN apk add --no-cache bash curl jq gcc git go musl-dev
|
||||
COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
COPY ./ ./
|
||||
RUN apk add --no-cache bash git go gcc musl-dev; \
|
||||
sh build.sh docker
|
||||
RUN bash build.sh release docker
|
||||
|
||||
FROM alpine:edge
|
||||
LABEL MAINTAINER="i@nn.ci"
|
||||
VOLUME /opt/alist/data/
|
||||
WORKDIR /opt/alist/
|
||||
COPY --from=builder /app/bin/alist ./
|
||||
EXPOSE 5244
|
||||
CMD [ "./alist" ]
|
||||
|
||||
ARG INSTALL_FFMPEG=false
|
||||
ARG INSTALL_ARIA2=false
|
||||
LABEL MAINTAINER="OpenList"
|
||||
|
||||
WORKDIR /opt/openlist/
|
||||
|
||||
RUN apk update && \
|
||||
apk upgrade --no-cache && \
|
||||
apk add --no-cache bash ca-certificates su-exec tzdata; \
|
||||
[ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
|
||||
[ "$INSTALL_ARIA2" = "true" ] && apk add --no-cache curl aria2 && \
|
||||
mkdir -p /opt/aria2/.aria2 && \
|
||||
wget https://github.com/P3TERX/aria2.conf/archive/refs/heads/master.tar.gz -O /tmp/aria-conf.tar.gz && \
|
||||
tar -zxvf /tmp/aria-conf.tar.gz -C /opt/aria2/.aria2 --strip-components=1 && rm -f /tmp/aria-conf.tar.gz && \
|
||||
sed -i 's|rpc-secret|#rpc-secret|g' /opt/aria2/.aria2/aria2.conf && \
|
||||
sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/aria2.conf && \
|
||||
sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/script.conf && \
|
||||
sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/aria2.conf && \
|
||||
sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/script.conf && \
|
||||
touch /opt/aria2/.aria2/aria2.session && \
|
||||
/opt/aria2/.aria2/tracker.sh ; \
|
||||
rm -rf /var/cache/apk/*
|
||||
|
||||
COPY --chmod=755 --from=builder /app/bin/openlist ./
|
||||
COPY --chmod=755 entrypoint.sh /entrypoint.sh
|
||||
RUN /entrypoint.sh version
|
||||
|
||||
ENV PUID=0 PGID=0 UMASK=022 RUN_ARIA2=${INSTALL_ARIA2}
|
||||
VOLUME /opt/openlist/data/
|
||||
EXPOSE 5244 5245
|
||||
CMD [ "/entrypoint.sh" ]
|
||||
|
||||
34 Dockerfile.ci (new file)
@@ -0,0 +1,34 @@
|
||||
FROM docker.io/library/alpine:edge
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
ARG INSTALL_FFMPEG=false
|
||||
ARG INSTALL_ARIA2=false
|
||||
LABEL MAINTAINER="OpenList"
|
||||
|
||||
WORKDIR /opt/openlist/
|
||||
|
||||
RUN apk update && \
|
||||
apk upgrade --no-cache && \
|
||||
apk add --no-cache bash ca-certificates su-exec tzdata; \
|
||||
[ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
|
||||
[ "$INSTALL_ARIA2" = "true" ] && apk add --no-cache curl aria2 && \
|
||||
mkdir -p /opt/aria2/.aria2 && \
|
||||
wget https://github.com/P3TERX/aria2.conf/archive/refs/heads/master.tar.gz -O /tmp/aria-conf.tar.gz && \
|
||||
tar -zxvf /tmp/aria-conf.tar.gz -C /opt/aria2/.aria2 --strip-components=1 && rm -f /tmp/aria-conf.tar.gz && \
|
||||
sed -i 's|rpc-secret|#rpc-secret|g' /opt/aria2/.aria2/aria2.conf && \
|
||||
sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/aria2.conf && \
|
||||
sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/script.conf && \
|
||||
sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/aria2.conf && \
|
||||
sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/script.conf && \
|
||||
touch /opt/aria2/.aria2/aria2.session && \
|
||||
/opt/aria2/.aria2/tracker.sh ; \
|
||||
rm -rf /var/cache/apk/*
|
||||
|
||||
COPY --chmod=755 /build/${TARGETPLATFORM}/openlist ./
|
||||
COPY --chmod=755 entrypoint.sh /entrypoint.sh
|
||||
RUN /entrypoint.sh version
|
||||
|
||||
ENV PUID=0 PGID=0 UMASK=022 RUN_ARIA2=${INSTALL_ARIA2}
|
||||
VOLUME /opt/openlist/data/
|
||||
EXPOSE 5244 5245
|
||||
CMD [ "/entrypoint.sh" ]
|
||||
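Dockerfile.ci above expects prebuilt binaries under build/<TARGETPLATFORM>/ and is normally driven by docker/build-push-action in the release workflows. A hedged sketch of the equivalent manual buildx invocation (image name, tag and platform list are illustrative; the binaries must already exist in the build context):

```shell
# Manual multi-arch build/push mirroring the CI step (sketch).
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --build-arg INSTALL_FFMPEG=true \
  -f Dockerfile.ci \
  -t openlistteam/openlist:beta \
  --push \
  .
```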
674 LICENSE
@@ -1,21 +1,661 @@
|
||||
MIT License
|
||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (c) 2020 Xhofe
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
Preamble
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published
|
||||
by the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
README.md (137 changed lines, Executable file → Normal file)
@@ -1,65 +1,120 @@
|
||||
<div align="center">
|
||||
<h3><a href="https://alist.nn.ci">Alist</a></h3>
|
||||
<p><em>🗂️Another file list program that supports multiple storage, powered by Gin and React.</em></p>
|
||||
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/release/Xhofe/alist?style=flat-square" alt="latest version"></a>
|
||||
<a href="https://github.com/Xhofe/alist/discussions"><img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936&style=flat-square" alt="discussions"></a>
|
||||
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build?style=flat-square" alt="Build status"></a>
|
||||
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/downloads/Xhofe/alist/total?style=flat-square&color=%239F7AEA" alt="Downloads"></a>
|
||||
<a href="https://github.com/Xhofe/alist/blob/v2/LICENSE"><img src="https://img.shields.io/github/license/Xhofe/alist?style=flat-square" alt="License"></a>
|
||||
<a href="https://pay.xhofe.top">
|
||||
<img src="https://img.shields.io/badge/%24-donate-ff69b4.svg?style=flat-square" alt="donate">
|
||||
</a>
|
||||
</div>
|
||||
<img style="width: 128px; height: 128px;" src="https://raw.githubusercontent.com/OpenListTeam/Logo/main/logo.svg" alt="logo" />
|
||||
|
||||
<p><em>OpenList is a resilient, long-term governance, community-driven fork of AList — built to defend open source against trust-based attacks.</em></p>
|
||||
|
||||
<img src="https://goreportcard.com/badge/github.com/OpenListTeam/OpenList/v3" alt="latest version" />
|
||||
<a href="https://github.com/OpenListTeam/OpenList/blob/main/LICENSE"><img src="https://img.shields.io/github/license/OpenListTeam/OpenList" alt="License" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/actions/workflow/status/OpenListTeam/OpenList/build.yml?branch=main" alt="Build status" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/release/OpenListTeam/OpenList" alt="latest version" /></a>
|
||||
|
||||
<a href="https://github.com/OpenListTeam/OpenList/discussions"><img src="https://img.shields.io/github/discussions/OpenListTeam/OpenList?color=%23ED8936" alt="discussions" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/downloads/OpenListTeam/OpenList/total?color=%239F7AEA&logo=github" alt="Downloads" /></a>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
English | [中文](./README_cn.md)
|
||||
- English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Dutch](./README_nl.md)
|
||||
|
||||
- [Contributing](./CONTRIBUTING.md)
|
||||
- [CODE OF CONDUCT](./CODE_OF_CONDUCT.md)
|
||||
- [LICENSE](./LICENSE)
|
||||
|
||||
## Features
|
||||
|
||||
- [x] multiple storage
|
||||
- [x] Multiple storages
|
||||
- [x] Local storage
|
||||
- [x] [aliyundrive](https://www.aliyundrive.com/)
|
||||
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
|
||||
- [x] [189cloud](https://cloud.189.cn)
|
||||
- [x] [GoogleDrive](https://drive.google.com/)
|
||||
- [x] [123pan](https://www.123pan.com/)
|
||||
- [x] [lanzou](https://pc.woozooo.com/)
|
||||
- [x] [Alist](https://github.com/Xhofe/alist)
|
||||
- [x] FTP
|
||||
- [x] [PikPak](https://www.mypikpak.com/)
|
||||
- [x] [Aliyundrive](https://www.alipan.com)
|
||||
- [x] OneDrive / Sharepoint ([Global](https://www.microsoft.com/en-us/microsoft-365/onedrive/online-cloud-storage), [CN](https://portal.partner.microsoftonline.cn), DE, US)
|
||||
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
|
||||
- [x] [GoogleDrive](https://drive.google.com)
|
||||
- [x] [123pan](https://www.123pan.com)
|
||||
- [x] [FTP / SFTP](https://en.wikipedia.org/wiki/File_Transfer_Protocol)
|
||||
- [x] [PikPak](https://www.mypikpak.com)
|
||||
- [x] [S3](https://aws.amazon.com/s3)
|
||||
- [x] [Seafile](https://seafile.com)
|
||||
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
|
||||
- [x] [WebDAV](https://en.wikipedia.org/wiki/WebDAV)
|
||||
- [x] Teambition([China](https://www.teambition.com), [International](https://us.teambition.com))
|
||||
- [x] [Mediatrack](https://www.mediatrack.cn)
|
||||
- [x] [139yun](https://yun.139.com) (Personal, Family, Group)
|
||||
- [x] [YandexDisk](https://disk.yandex.com)
|
||||
- [x] [BaiduNetdisk](http://pan.baidu.com)
|
||||
- [x] [Terabox](https://www.terabox.com/main)
|
||||
- [x] [UC](https://drive.uc.cn)
|
||||
- [x] [Quark](https://pan.quark.cn)
|
||||
- [x] [Thunder](https://pan.xunlei.com)
|
||||
- [x] [Lanzou](https://www.lanzou.com)
|
||||
- [x] [ILanzou](https://www.ilanzou.com)
|
||||
- [x] [Aliyundrive share](https://www.alipan.com)
|
||||
- [x] [Google photo](https://photos.google.com)
|
||||
- [x] [Mega.nz](https://mega.nz)
|
||||
- [x] [Baidu photo](https://photo.baidu.com)
|
||||
- [x] [SMB](https://en.wikipedia.org/wiki/Server_Message_Block)
|
||||
- [x] [115](https://115.com)
|
||||
- [X] [Cloudreve](https://cloudreve.org)
|
||||
- [x] [Dropbox](https://www.dropbox.com)
|
||||
- [x] [FeijiPan](https://www.feijipan.com)
|
||||
- [x] [dogecloud](https://www.dogecloud.com/product/oss)
|
||||
- [x] [Azure Blob Storage](https://azure.microsoft.com/products/storage/blobs)
|
||||
- [x] Easy to deploy and out-of-the-box
|
||||
- [x] File preview (PDF, markdown, code, plain text, ...)
|
||||
- [x] Image preview in gallery mode
|
||||
- [x] Video and audio preview (mp4, mp3, ...)
|
||||
- [x] Video and audio preview, support lyrics and subtitles
|
||||
- [x] Office documents preview (docx, pptx, xlsx, ...)
|
||||
- [x] `README.md` preview rendering
|
||||
- [x] File permalink copy and direct file download
|
||||
- [x] Dark mode
|
||||
- [x] I18n
|
||||
- [x] Protected routes (password protection and authentication)
|
||||
- [x] WebDav (readonly)
|
||||
- [x] [Docker Deploy](https://hub.docker.com/r/xhofe/alist)
|
||||
- [x] Cloudflare workers proxy
|
||||
|
||||
## Discussion
|
||||
|
||||
Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports only.**
|
||||
|
||||
## Demo
|
||||
|
||||
Available at: <https://alist.nn.ci>.
|
||||
|
||||

|
||||
- [x] Protected routes (password protection and authentication)
|
||||
- [x] WebDAV
|
||||
- [x] Docker Deploy
|
||||
- [x] Cloudflare Workers proxy
|
||||
- [x] File/Folder package download
|
||||
- [x] Web upload (visitors can be allowed to upload), delete, mkdir, rename, move and copy
- [x] Offline download
- [x] Copy files between two storages
- [x] Multi-threaded download acceleration for single-threaded downloads/streams
|
||||
|
||||
## Document
|
||||
|
||||
<https://alist-doc.nn.ci/en/>
|
||||
- 📘 [Docs & Install Guide](https://docs.oplist.org)
|
||||
- 📚 [Backup Docs Site](https://docs.openlist.team)
|
||||
|
||||
## Demo
|
||||
|
||||
N/A (to be rebuilt)
|
||||
|
||||
## Discussion
|
||||
|
||||
Please refer to [*Discussions*](https://github.com/OpenListTeam/OpenList/discussions) for general questions; ***Issues* are for bug reports and feature requests only.**
|
||||
|
||||
## License
|
||||
|
||||
`AList` is open-source software licensed under the MIT license.
`OpenList` is open-source software licensed under the [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) license.
|
||||
|
||||
---
|
||||
## Disclaimer
|
||||
|
||||
> [@Blog](https://www.nn.ci/) · [@GitHub](https://github.com/Xhofe)
|
||||
- This project is free and open-source software designed to facilitate file sharing via net disks, primarily intended to support downloading and learning the Go programming language.
|
||||
- Please comply with all applicable laws and regulations when using this software. Any form of misuse is strictly prohibited.
|
||||
- The software is based on official SDKs or APIs without any modification, disruption, or interference with their behavior.
|
||||
- It only performs HTTP 302 redirects or traffic forwarding, and does not intercept, store, or tamper with any user data.
|
||||
- This project is not affiliated with any official platform or service provider.
|
||||
- The software is provided "as is", without any warranties of any kind, either express or implied, including but not limited to warranties of merchantability or fitness for a particular purpose.
|
||||
- The maintainers are not liable for any direct or indirect damages arising from the use of, or inability to use, this software.
|
||||
- You are solely responsible for any risks associated with using this software, including but not limited to account bans or download speed limitations.
|
||||
- This project is licensed under the [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) License. Please see the [LICENSE](./LICENSE) file for details.
|
||||
|
||||
## Contact Us
|
||||
|
||||
- [@GitHub](https://github.com/OpenListTeam)
|
||||
- [Telegram Group](https://t.me/OpenListTeam)
|
||||
- [Telegram Channel](https://t.me/OpenListOfficial)
|
||||
|
||||
## Contributors
|
||||
|
||||
We sincerely thank the author [Xhofe](https://github.com/Xhofe) of the original project [AlistGo/alist](https://github.com/AlistGo/alist) and all other contributors.
|
||||
|
||||
Thanks goes to these wonderful people:
|
||||
|
||||
[](https://github.com/OpenListTeam/OpenList/graphs/contributors)
|
||||
|
||||
README_cn.md (144 changed lines)
@@ -1,64 +1,120 @@
|
||||
<div align="center">
|
||||
<h3><a href="https://alist.nn.ci">Alist</a></h3>
|
||||
<p><em>🗂️一个支持多存储的文件列表程序,使用 Gin 和 React 。</em></p>
|
||||
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/release/Xhofe/alist?style=flat-square" alt="latest version"></a>
|
||||
<a href="https://github.com/Xhofe/alist/discussions"><img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936&style=flat-square" alt="discussions"></a>
|
||||
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build?style=flat-square" alt="Build status"></a>
|
||||
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/downloads/Xhofe/alist/total?style=flat-square&color=%239F7AEA" alt="Downloads"></a>
|
||||
<a href="https://github.com/Xhofe/alist/blob/v2/LICENSE"><img src="https://img.shields.io/github/license/Xhofe/alist?style=flat-square" alt="License"></a>
|
||||
<a href="https://pay.xhofe.top">
|
||||
<img src="https://img.shields.io/badge/%24-donate-ff69b4.svg?style=flat-square" alt="donate">
|
||||
</a>
|
||||
<img style="width: 128px; height: 128px;" src="https://raw.githubusercontent.com/OpenListTeam/Logo/main/logo.svg" alt="logo" />
|
||||
|
||||
<p><em>OpenList 是一个有韧性、长期治理、社区驱动的 AList 分支,旨在防御基于信任的开源攻击。</em></p>
|
||||
|
||||
<img src="https://goreportcard.com/badge/github.com/OpenListTeam/OpenList/v3" alt="latest version" />
|
||||
<a href="https://github.com/OpenListTeam/OpenList/blob/main/LICENSE"><img src="https://img.shields.io/github/license/OpenListTeam/OpenList" alt="License" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/actions/workflow/status/OpenListTeam/OpenList/build.yml?branch=main" alt="Build status" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/release/OpenListTeam/OpenList" alt="latest version" /></a>
|
||||
|
||||
<a href="https://github.com/OpenListTeam/OpenList/discussions"><img src="https://img.shields.io/github/discussions/OpenListTeam/OpenList?color=%23ED8936" alt="discussions" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/downloads/OpenListTeam/OpenList/total?color=%239F7AEA&logo=github" alt="Downloads" /></a>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
[English](./README.md) | 中文
|
||||
- [English](./README.md) | 中文 | [日本語](./README_ja.md) | [Dutch](./README_nl.md)
|
||||
|
||||
## 支持
|
||||
- [贡献指南](./CONTRIBUTING.md)
|
||||
- [行为准则](./CODE_OF_CONDUCT.md)
|
||||
- [许可证](./LICENSE)
|
||||
|
||||
## 功能
|
||||
|
||||
- [x] 多种存储
|
||||
- [x] 本地存储
|
||||
- [x] [阿里云盘](https://www.aliyundrive.com/)
|
||||
- [x] OneDrive / Sharepoint([国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us)
|
||||
- [x] [天翼云盘](https://cloud.189.cn)
|
||||
- [x] [GoogleDrive](https://drive.google.com/)
|
||||
- [x] [123云盘](https://www.123pan.com/)
|
||||
- [x] [蓝奏云](https://pc.woozooo.com/)
|
||||
- [x] [Alist](https://github.com/Xhofe/alist)
|
||||
- [x] FTP
|
||||
- [x] [PikPak](https://www.mypikpak.com/)
|
||||
- [x] 文件预览(PDF、markdown、代码、纯文本……)
|
||||
- [x] 画廊模式下的图像预览
|
||||
- [x] 视频和音频预览(mp4、mp3 等)
|
||||
- [x] Office 文档预览(docx、pptx、xlsx、...)
|
||||
- [x] [阿里云盘](https://www.alipan.com)
|
||||
- [x] OneDrive / Sharepoint ([国际版](https://www.microsoft.com/en-us/microsoft-365/onedrive/online-cloud-storage), [中国](https://portal.partner.microsoftonline.cn), DE, US)
|
||||
- [x] [天翼云盘](https://cloud.189.cn)(个人、家庭)
|
||||
- [x] [GoogleDrive](https://drive.google.com)
|
||||
- [x] [123云盘](https://www.123pan.com)
|
||||
- [x] [FTP / SFTP](https://en.wikipedia.org/wiki/File_Transfer_Protocol)
|
||||
- [x] [PikPak](https://www.mypikpak.com)
|
||||
- [x] [S3](https://aws.amazon.com/s3)
|
||||
- [x] [Seafile](https://seafile.com)
|
||||
- [x] [又拍云对象存储](https://www.upyun.com/products/file-storage)
|
||||
- [x] [WebDAV](https://en.wikipedia.org/wiki/WebDAV)
|
||||
- [x] Teambition([中国](https://www.teambition.com), [国际](https://us.teambition.com))
|
||||
- [x] [分秒帧](https://www.mediatrack.cn)
|
||||
- [x] [和彩云](https://yun.139.com)(个人、家庭、群组)
|
||||
- [x] [YandexDisk](https://disk.yandex.com)
|
||||
- [x] [百度网盘](http://pan.baidu.com)
|
||||
- [x] [Terabox](https://www.terabox.com/main)
|
||||
- [x] [UC网盘](https://drive.uc.cn)
|
||||
- [x] [夸克网盘](https://pan.quark.cn)
|
||||
- [x] [迅雷网盘](https://pan.xunlei.com)
|
||||
- [x] [蓝奏云](https://www.lanzou.com)
|
||||
- [x] [蓝奏云优享版](https://www.ilanzou.com)
|
||||
- [x] [阿里云盘分享](https://www.alipan.com)
|
||||
- [x] [Google 相册](https://photos.google.com)
|
||||
- [x] [Mega.nz](https://mega.nz)
|
||||
- [x] [百度相册](https://photo.baidu.com)
|
||||
- [x] [SMB](https://en.wikipedia.org/wiki/Server_Message_Block)
|
||||
- [x] [115](https://115.com)
|
||||
- [x] [Cloudreve](https://cloudreve.org)
|
||||
- [x] [Dropbox](https://www.dropbox.com)
|
||||
- [x] [飞机盘](https://www.feijipan.com)
|
||||
- [x] [多吉云](https://www.dogecloud.com/product/oss)
|
||||
- [x] [Azure Blob Storage](https://azure.microsoft.com/products/storage/blobs)
|
||||
- [x] 部署方便,开箱即用
|
||||
- [x] 文件预览(PDF、markdown、代码、纯文本等)
|
||||
- [x] 画廊模式下的图片预览
|
||||
- [x] 视频和音频预览,支持歌词和字幕
|
||||
- [x] Office 文档预览(docx、pptx、xlsx 等)
|
||||
- [x] `README.md` 预览渲染
|
||||
- [x] 文件永久链接复制和直接文件下载
|
||||
- [x] 黑暗模式
|
||||
- [x] 国际化
|
||||
- [x] 受保护的路由(密码保护和身份验证)
|
||||
- [x] WebDav(只读)
|
||||
- [x] [Docker 部署](https://hub.docker.com/r/xhofe/alist)
|
||||
- [x] Cloudflare workers 中转
|
||||
|
||||
## 讨论
|
||||
|
||||
一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) ,**issue仅针对错误报告。**
|
||||
|
||||
## 演示
|
||||
|
||||
<https://alist.nn.ci>。
|
||||
|
||||

|
||||
- [x] 受保护的路由(密码保护和认证)
|
||||
- [x] WebDAV
|
||||
- [x] Docker 部署
|
||||
- [x] Cloudflare Workers 代理
|
||||
- [x] 文件/文件夹打包下载
|
||||
- [x] 网页上传(可允许访客上传)、删除、新建文件夹、重命名、移动和复制
|
||||
- [x] 离线下载
|
||||
- [x] 跨存储复制文件
|
||||
- [x] 单文件多线程下载/流式加速
|
||||
|
||||
## 文档
|
||||
|
||||
<https://alist-doc.nn.ci/>
|
||||
- 📘 [文档与安装指南](https://docs.oplist.org)
|
||||
- 📚 [备用文档站点](https://docs.openlist.team)
|
||||
|
||||
## 许可
|
||||
## 演示
|
||||
|
||||
`AList` 是在 MIT 许可下许可的开源软件。
|
||||
N/A(待重建)
|
||||
|
||||
---
|
||||
## 讨论
|
||||
|
||||
> [@Blog](https://www.nn.ci/) · [@GitHub](https://github.com/Xhofe)
|
||||
如有一般性问题请前往 [*Discussions*](https://github.com/OpenListTeam/OpenList/discussions) 讨论区,***Issues* 仅用于错误报告和功能请求。**
|
||||
|
||||
## 许可证
|
||||
|
||||
`OpenList` 是基于 [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) 许可证的开源软件。
|
||||
|
||||
## 免责声明
|
||||
|
||||
- 本项目为免费开源软件,旨在通过网盘便捷分享文件,主要用于 Go 语言的下载与学习。
|
||||
- 使用本软件时请遵守相关法律法规,严禁任何形式的滥用。
|
||||
- 本软件基于官方 SDK 或 API 实现,未对其行为进行任何修改、破坏或干扰。
|
||||
- 仅进行 HTTP 302 跳转或流量转发,不拦截、存储或篡改任何用户数据。
|
||||
- 本项目与任何官方平台或服务提供商无关。
|
||||
- 本软件按“原样”提供,不附带任何明示或暗示的担保,包括但不限于适销性或特定用途的适用性。
|
||||
- 维护者不对因使用或无法使用本软件而导致的任何直接或间接损失负责。
|
||||
- 您需自行承担使用本软件的所有风险,包括但不限于账号被封、下载限速等。
|
||||
- 本项目遵循 [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) 许可证,详情请参见 [LICENSE](./LICENSE) 文件。
|
||||
|
||||
## 联系我们
|
||||
|
||||
- [@GitHub](https://github.com/OpenListTeam)
|
||||
- [Telegram 交流群](https://t.me/OpenListTeam)
|
||||
- [Telegram 频道](https://t.me/OpenListOfficial)
|
||||
|
||||
## 贡献者
|
||||
|
||||
我们衷心感谢原项目 [AlistGo/alist](https://github.com/AlistGo/alist) 的作者 [Xhofe](https://github.com/Xhofe) 及所有其他贡献者。
|
||||
|
||||
感谢这些优秀的人:
|
||||
|
||||
[](https://github.com/OpenListTeam/OpenList/graphs/contributors)
|
||||
|
||||
README_ja.md (new file, 120 added lines)
@@ -0,0 +1,120 @@
|
||||
<div align="center">
|
||||
<img style="width: 128px; height: 128px;" src="https://raw.githubusercontent.com/OpenListTeam/Logo/main/logo.svg" alt="logo" />
|
||||
|
||||
<p><em>OpenList は、信頼ベースの攻撃からオープンソースを守るために構築された、レジリエントで長期ガバナンス、コミュニティ主導の AList フォークです。</em></p>
|
||||
|
||||
<img src="https://goreportcard.com/badge/github.com/OpenListTeam/OpenList/v3" alt="latest version" />
|
||||
<a href="https://github.com/OpenListTeam/OpenList/blob/main/LICENSE"><img src="https://img.shields.io/github/license/OpenListTeam/OpenList" alt="License" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/actions/workflow/status/OpenListTeam/OpenList/build.yml?branch=main" alt="Build status" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/release/OpenListTeam/OpenList" alt="latest version" /></a>
|
||||
|
||||
<a href="https://github.com/OpenListTeam/OpenList/discussions"><img src="https://img.shields.io/github/discussions/OpenListTeam/OpenList?color=%23ED8936" alt="discussions" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/downloads/OpenListTeam/OpenList/total?color=%239F7AEA&logo=github" alt="Downloads" /></a>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
- [English](./README.md) | [中文](./README_cn.md) | 日本語 | [Dutch](./README_nl.md)
|
||||
|
||||
- [コントリビュート](./CONTRIBUTING.md)
|
||||
- [行動規範](./CODE_OF_CONDUCT.md)
|
||||
- [ライセンス](./LICENSE)
|
||||
|
||||
## 特徴
|
||||
|
||||
- [x] 複数ストレージ
|
||||
- [x] ローカルストレージ
|
||||
- [x] [Aliyundrive](https://www.alipan.com)
|
||||
- [x] OneDrive / Sharepoint ([グローバル](https://www.microsoft.com/en-us/microsoft-365/onedrive/online-cloud-storage), [中国](https://portal.partner.microsoftonline.cn), DE, US)
|
||||
- [x] [189cloud](https://cloud.189.cn)(個人、家族)
|
||||
- [x] [GoogleDrive](https://drive.google.com)
|
||||
- [x] [123pan](https://www.123pan.com)
|
||||
- [x] [FTP / SFTP](https://en.wikipedia.org/wiki/File_Transfer_Protocol)
|
||||
- [x] [PikPak](https://www.mypikpak.com)
|
||||
- [x] [S3](https://aws.amazon.com/s3)
|
||||
- [x] [Seafile](https://seafile.com)
|
||||
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
|
||||
- [x] [WebDAV](https://en.wikipedia.org/wiki/WebDAV)
|
||||
- [x] Teambition([中国](https://www.teambition.com), [国際](https://us.teambition.com))
|
||||
- [x] [Mediatrack](https://www.mediatrack.cn)
|
||||
- [x] [139yun](https://yun.139.com)(個人、家族、グループ)
|
||||
- [x] [YandexDisk](https://disk.yandex.com)
|
||||
- [x] [BaiduNetdisk](http://pan.baidu.com)
|
||||
- [x] [Terabox](https://www.terabox.com/main)
|
||||
- [x] [UC](https://drive.uc.cn)
|
||||
- [x] [Quark](https://pan.quark.cn)
|
||||
- [x] [Thunder](https://pan.xunlei.com)
|
||||
- [x] [Lanzou](https://www.lanzou.com)
|
||||
- [x] [ILanzou](https://www.ilanzou.com)
|
||||
- [x] [Aliyundrive share](https://www.alipan.com)
|
||||
- [x] [Google photo](https://photos.google.com)
|
||||
- [x] [Mega.nz](https://mega.nz)
|
||||
- [x] [Baidu photo](https://photo.baidu.com)
|
||||
- [x] [SMB](https://en.wikipedia.org/wiki/Server_Message_Block)
|
||||
- [x] [115](https://115.com)
|
||||
- [x] [Cloudreve](https://cloudreve.org)
|
||||
- [x] [Dropbox](https://www.dropbox.com)
|
||||
- [x] [FeijiPan](https://www.feijipan.com)
|
||||
- [x] [dogecloud](https://www.dogecloud.com/product/oss)
|
||||
- [x] [Azure Blob Storage](https://azure.microsoft.com/products/storage/blobs)
|
||||
- [x] 簡単にデプロイでき、すぐに使える
|
||||
- [x] ファイルプレビュー(PDF、markdown、コード、テキストなど)
|
||||
- [x] ギャラリーモードでの画像プレビュー
|
||||
- [x] ビデオ・オーディオプレビュー、歌詞・字幕対応
|
||||
- [x] Officeドキュメントプレビュー(docx、pptx、xlsxなど)
|
||||
- [x] `README.md` プレビュー表示
|
||||
- [x] ファイルのパーマリンクコピーと直接ダウンロード
|
||||
- [x] ダークモード
|
||||
- [x] 国際化対応
|
||||
- [x] 保護されたルート(パスワード保護と認証)
|
||||
- [x] WebDAV
|
||||
- [x] Dockerデプロイ
|
||||
- [x] Cloudflare Workersプロキシ
|
||||
- [x] ファイル/フォルダのパッケージダウンロード
|
||||
- [x] Webアップロード(訪問者のアップロード許可可)、削除、フォルダ作成、リネーム、移動、コピー
|
||||
- [x] オフラインダウンロード
|
||||
- [x] ストレージ間のファイルコピー
|
||||
- [x] 単一ファイルのマルチスレッドダウンロード/ストリーム加速
|
||||
|
||||
## ドキュメント
|
||||
|
||||
- 📘 [ドキュメント・インストールガイド](https://docs.oplist.org)
|
||||
- 📚 [バックアップドキュメントサイト](https://docs.openlist.team)
|
||||
|
||||
## デモ
|
||||
|
||||
N/A(再構築中)
|
||||
|
||||
## ディスカッション
|
||||
|
||||
一般的な質問は [*Discussions*](https://github.com/OpenListTeam/OpenList/discussions) をご利用ください。***Issues* はバグ報告と機能リクエスト専用です。**
|
||||
|
||||
## ライセンス
|
||||
|
||||
「OpenList」は [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) ライセンスの下で公開されているオープンソースソフトウェアです。
|
||||
|
||||
## 免責事項
|
||||
|
||||
- 本プロジェクトは無料のオープンソースソフトウェアであり、ネットワークディスクを通じたファイル共有を容易にすることを目的とし、主に Go 言語のダウンロードと学習をサポートします。
|
||||
- 本ソフトウェアの利用にあたっては、関連する法令を遵守し、不正利用を固く禁じます。
|
||||
- 本ソフトウェアは公式 SDK または API に基づいており、その動作を一切改変・破壊・妨害しません。
|
||||
- 302 リダイレクトまたはトラフィック転送のみを行い、ユーザーデータの傍受・保存・改ざんは一切行いません。
|
||||
- 本プロジェクトは、いかなる公式プラットフォームやサービスプロバイダーとも関係ありません。
|
||||
- 本ソフトウェアは「現状有姿」で提供されており、商品性や特定目的への適合性を含むいかなる保証もありません。
|
||||
- 本ソフトウェアの使用または使用不能によるいかなる直接的・間接的損害についても、メンテナは責任を負いません。
|
||||
- 本ソフトウェアの利用に伴うすべてのリスク(アカウントの凍結やダウンロード速度制限などを含む)は、利用者自身が負うものとします。
|
||||
- 本プロジェクトは [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) ライセンスに従います。詳細は [LICENSE](./LICENSE) ファイルをご覧ください。
|
||||
|
||||
## お問い合わせ
|
||||
|
||||
- [@GitHub](https://github.com/OpenListTeam)
|
||||
- [Telegram グループ](https://t.me/OpenListTeam)
|
||||
- [Telegram チャンネル](https://t.me/OpenListOfficial)
|
||||
|
||||
## コントリビューター
|
||||
|
||||
オリジナルプロジェクト [AlistGo/alist](https://github.com/AlistGo/alist) の作者 [Xhofe](https://github.com/Xhofe) およびその他すべての貢献者に心より感謝いたします。
|
||||
|
||||
素晴らしい皆様に感謝します:
|
||||
|
||||
[Contributors](https://github.com/OpenListTeam/OpenList/graphs/contributors)
|
||||
README_nl.md (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
<div align="center">
|
||||
<img style="width: 128px; height: 128px;" src="https://raw.githubusercontent.com/OpenListTeam/Logo/main/logo.svg" alt="logo" />
|
||||
|
||||
<p><em>OpenList is een veerkrachtige, langetermijn, door de gemeenschap geleide fork van AList — gebouwd om open source te beschermen tegen op vertrouwen gebaseerde aanvallen.</em></p>
|
||||
|
||||
<img src="https://goreportcard.com/badge/github.com/OpenListTeam/OpenList/v3" alt="latest version" />
|
||||
<a href="https://github.com/OpenListTeam/OpenList/blob/main/LICENSE"><img src="https://img.shields.io/github/license/OpenListTeam/OpenList" alt="License" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/actions/workflow/status/OpenListTeam/OpenList/build.yml?branch=main" alt="Build status" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/release/OpenListTeam/OpenList" alt="latest version" /></a>
|
||||
|
||||
<a href="https://github.com/OpenListTeam/OpenList/discussions"><img src="https://img.shields.io/github/discussions/OpenListTeam/OpenList?color=%23ED8936" alt="discussions" /></a>
|
||||
<a href="https://github.com/OpenListTeam/OpenList/releases"><img src="https://img.shields.io/github/downloads/OpenListTeam/OpenList/total?color=%239F7AEA&logo=github" alt="Downloads" /></a>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
- [English](./README.md) | [中文](./README_cn.md) | [日本語](./README_ja.md) | Dutch
|
||||
|
||||
- [Bijdragen](./CONTRIBUTING.md)
|
||||
- [Gedragscode](./CODE_OF_CONDUCT.md)
|
||||
- [Licentie](./LICENSE)
|
||||
|
||||
## Functies
|
||||
|
||||
- [x] Meerdere opslagmogelijkheden
|
||||
- [x] Lokale opslag
|
||||
- [x] [Aliyundrive](https://www.alipan.com)
|
||||
- [x] OneDrive / Sharepoint ([Global](https://www.microsoft.com/en-us/microsoft-365/onedrive/online-cloud-storage), [CN](https://portal.partner.microsoftonline.cn), DE, US)
|
||||
- [x] [189cloud](https://cloud.189.cn) (Persoonlijk, Familie)
|
||||
- [x] [GoogleDrive](https://drive.google.com)
|
||||
- [x] [123pan](https://www.123pan.com)
|
||||
- [x] [FTP / SFTP](https://en.wikipedia.org/wiki/File_Transfer_Protocol)
|
||||
- [x] [PikPak](https://www.mypikpak.com)
|
||||
- [x] [S3](https://aws.amazon.com/s3)
|
||||
- [x] [Seafile](https://seafile.com)
|
||||
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
|
||||
- [x] [WebDAV](https://en.wikipedia.org/wiki/WebDAV)
|
||||
- [x] Teambition([China](https://www.teambition.com), [Internationaal](https://us.teambition.com))
|
||||
- [x] [Mediatrack](https://www.mediatrack.cn)
|
||||
- [x] [139yun](https://yun.139.com) (Persoonlijk, Familie, Groep)
|
||||
- [x] [YandexDisk](https://disk.yandex.com)
|
||||
- [x] [BaiduNetdisk](http://pan.baidu.com)
|
||||
- [x] [Terabox](https://www.terabox.com/main)
|
||||
- [x] [UC](https://drive.uc.cn)
|
||||
- [x] [Quark](https://pan.quark.cn)
|
||||
- [x] [Thunder](https://pan.xunlei.com)
|
||||
- [x] [Lanzou](https://www.lanzou.com)
|
||||
- [x] [ILanzou](https://www.ilanzou.com)
|
||||
- [x] [Aliyundrive share](https://www.alipan.com)
|
||||
- [x] [Google photo](https://photos.google.com)
|
||||
- [x] [Mega.nz](https://mega.nz)
|
||||
- [x] [Baidu photo](https://photo.baidu.com)
|
||||
- [x] [SMB](https://en.wikipedia.org/wiki/Server_Message_Block)
|
||||
- [x] [115](https://115.com)
|
||||
- [x] [Cloudreve](https://cloudreve.org)
|
||||
- [x] [Dropbox](https://www.dropbox.com)
|
||||
- [x] [FeijiPan](https://www.feijipan.com)
|
||||
- [x] [dogecloud](https://www.dogecloud.com/product/oss)
|
||||
- [x] [Azure Blob Storage](https://azure.microsoft.com/products/storage/blobs)
|
||||
- [x] Eenvoudig te implementeren en direct te gebruiken
|
||||
- [x] Bestandsvoorbeeld (PDF, markdown, code, platte tekst, ...)
|
||||
- [x] Afbeeldingsvoorbeeld in galerijweergave
|
||||
- [x] Video- en audiovoorbeeld, ondersteuning voor songteksten en ondertitels
|
||||
- [x] Office-documenten voorbeeld (docx, pptx, xlsx, ...)
|
||||
- [x] `README.md` voorbeeldweergave
|
||||
- [x] Permalink kopiëren en direct downloaden van bestanden
|
||||
- [x] Donkere modus
|
||||
- [x] I18n
|
||||
- [x] Beschermde routes (wachtwoordbeveiliging en authenticatie)
|
||||
- [x] WebDAV
|
||||
- [x] Docker implementatie
|
||||
- [x] Cloudflare Workers proxy
|
||||
- [x] Bestands-/map-pakket download
|
||||
- [x] Webupload (bezoekers kunnen uploaden toestaan), verwijderen, map aanmaken, hernoemen, verplaatsen en kopiëren
|
||||
- [x] Offline download
|
||||
- [x] Bestanden kopiëren tussen twee opslaglocaties
|
||||
- [x] Multi-thread downloadversnelling voor enkelvoudige download/stream
|
||||
|
||||
## Documentatie
|
||||
|
||||
- 📘 [Documentatie & Installatiegids](https://docs.oplist.org)
|
||||
- 📚 [Back-up documentatiesite](https://docs.openlist.team)
|
||||
|
||||
## Demo
|
||||
|
||||
N.v.t. (wordt opnieuw opgebouwd)
|
||||
|
||||
## Discussie
|
||||
|
||||
Stel algemene vragen in [*Discussions*](https://github.com/OpenListTeam/OpenList/discussions), ***Issues* zijn alleen voor bugmeldingen en feature requests.**
|
||||
|
||||
## Licentie
|
||||
|
||||
`OpenList` is open-source software onder de [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) licentie.
|
||||
|
||||
## Disclaimer
|
||||
|
||||
- Dit project is gratis en open-source software, ontworpen om het delen van bestanden via netdisks te vergemakkelijken, voornamelijk bedoeld ter ondersteuning van het downloaden en leren van de programmeertaal Go.
|
||||
- Houd u bij het gebruik van deze software aan alle toepasselijke wetten en voorschriften. Elk misbruik is ten strengste verboden.
|
||||
- De software is gebaseerd op officiële SDK's of API's zonder enige wijziging, verstoring of beïnvloeding van hun gedrag.
|
||||
- Het voert alleen HTTP 302-omleidingen of verkeersdoorsturing uit en onderschept, slaat of wijzigt geen gebruikersgegevens.
|
||||
- Dit project is niet gelieerd aan enig officieel platform of dienstverlener.
|
||||
- De software wordt geleverd "zoals deze is", zonder enige vorm van garantie, expliciet of impliciet, inclusief maar niet beperkt tot garanties van verkoopbaarheid of geschiktheid voor een bepaald doel.
|
||||
- De beheerders zijn niet aansprakelijk voor enige directe of indirecte schade die voortvloeit uit het gebruik van of het onvermogen om deze software te gebruiken.
|
||||
- U bent zelf verantwoordelijk voor alle risico's die gepaard gaan met het gebruik van deze software, inclusief maar niet beperkt tot accountblokkades of downloadbeperkingen.
|
||||
- Dit project valt onder de [AGPL-3.0](https://www.gnu.org/licenses/agpl-3.0.txt) licentie. Zie het [LICENSE](./LICENSE) bestand voor details.
|
||||
|
||||
## Contact
|
||||
|
||||
- [@GitHub](https://github.com/OpenListTeam)
|
||||
- [Telegram Groep](https://t.me/OpenListTeam)
|
||||
- [Telegram Kanaal](https://t.me/OpenListOfficial)
|
||||
|
||||
## Bijdragers
|
||||
|
||||
Wij danken de auteur [Xhofe](https://github.com/Xhofe) van het originele project [AlistGo/alist](https://github.com/AlistGo/alist) en alle andere bijdragers.
|
||||
|
||||
Dank aan deze geweldige mensen:
|
||||
|
||||
[Contributors](https://github.com/OpenListTeam/OpenList/graphs/contributors)
|
||||
alist-proxy.js (deleted, 125 lines)
@@ -1,125 +0,0 @@
|
||||
const HOST = "YOUR_HOST";
|
||||
const TOKEN = "YOUR_TOKEN";
|
||||
|
||||
const corsHeaders = {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "GET,HEAD,POST,OPTIONS",
|
||||
"Access-Control-Max-Age": "86400",
|
||||
};
|
||||
|
||||
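// Minified MD5 implementation (exposes a global md5()); used below to verify the download link signature.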
!function(a){"use strict";function b(a,b){var c=(65535&a)+(65535&b),d=(a>>16)+(b>>16)+(c>>16);return d<<16|65535&c}function c(a,b){return a<<b|a>>>32-b}function d(a,d,e,f,g,h){return b(c(b(b(d,a),b(f,h)),g),e)}function e(a,b,c,e,f,g,h){return d(b&c|~b&e,a,b,f,g,h)}function f(a,b,c,e,f,g,h){return d(b&e|c&~e,a,b,f,g,h)}function g(a,b,c,e,f,g,h){return d(b^c^e,a,b,f,g,h)}function h(a,b,c,e,f,g,h){return d(c^(b|~e),a,b,f,g,h)}function i(a,c){a[c>>5]|=128<<c%32,a[(c+64>>>9<<4)+14]=c;var d,i,j,k,l,m=1732584193,n=-271733879,o=-1732584194,p=271733878;for(d=0;d<a.length;d+=16)i=m,j=n,k=o,l=p,m=e(m,n,o,p,a[d],7,-680876936),p=e(p,m,n,o,a[d+1],12,-389564586),o=e(o,p,m,n,a[d+2],17,606105819),n=e(n,o,p,m,a[d+3],22,-1044525330),m=e(m,n,o,p,a[d+4],7,-176418897),p=e(p,m,n,o,a[d+5],12,1200080426),o=e(o,p,m,n,a[d+6],17,-1473231341),n=e(n,o,p,m,a[d+7],22,-45705983),m=e(m,n,o,p,a[d+8],7,1770035416),p=e(p,m,n,o,a[d+9],12,-1958414417),o=e(o,p,m,n,a[d+10],17,-42063),n=e(n,o,p,m,a[d+11],22,-1990404162),m=e(m,n,o,p,a[d+12],7,1804603682),p=e(p,m,n,o,a[d+13],12,-40341101),o=e(o,p,m,n,a[d+14],17,-1502002290),n=e(n,o,p,m,a[d+15],22,1236535329),m=f(m,n,o,p,a[d+1],5,-165796510),p=f(p,m,n,o,a[d+6],9,-1069501632),o=f(o,p,m,n,a[d+11],14,643717713),n=f(n,o,p,m,a[d],20,-373897302),m=f(m,n,o,p,a[d+5],5,-701558691),p=f(p,m,n,o,a[d+10],9,38016083),o=f(o,p,m,n,a[d+15],14,-660478335),n=f(n,o,p,m,a[d+4],20,-405537848),m=f(m,n,o,p,a[d+9],5,568446438),p=f(p,m,n,o,a[d+14],9,-1019803690),o=f(o,p,m,n,a[d+3],14,-187363961),n=f(n,o,p,m,a[d+8],20,1163531501),m=f(m,n,o,p,a[d+13],5,-1444681467),p=f(p,m,n,o,a[d+2],9,-51403784),o=f(o,p,m,n,a[d+7],14,1735328473),n=f(n,o,p,m,a[d+12],20,-1926607734),m=g(m,n,o,p,a[d+5],4,-378558),p=g(p,m,n,o,a[d+8],11,-2022574463),o=g(o,p,m,n,a[d+11],16,1839030562),n=g(n,o,p,m,a[d+14],23,-35309556),m=g(m,n,o,p,a[d+1],4,-1530992060),p=g(p,m,n,o,a[d+4],11,1272893353),o=g(o,p,m,n,a[d+7],16,-155497632),n=g(n,o,p,m,a[d+10],23,-1094730640),m=g(m,n,o,p,a[d+13],4,681279174),p=g(p,m,n,o,a[d],11,-358537222),o=g(o,p,m,n,a[d+3],16,-722521979),n=g(n,o,p,m,a[d+6],23,76029189),m=g(m,n,o,p,a[d+9],4,-640364487),p=g(p,m,n,o,a[d+12],11,-421815835),o=g(o,p,m,n,a[d+15],16,530742520),n=g(n,o,p,m,a[d+2],23,-995338651),m=h(m,n,o,p,a[d],6,-198630844),p=h(p,m,n,o,a[d+7],10,1126891415),o=h(o,p,m,n,a[d+14],15,-1416354905),n=h(n,o,p,m,a[d+5],21,-57434055),m=h(m,n,o,p,a[d+12],6,1700485571),p=h(p,m,n,o,a[d+3],10,-1894986606),o=h(o,p,m,n,a[d+10],15,-1051523),n=h(n,o,p,m,a[d+1],21,-2054922799),m=h(m,n,o,p,a[d+8],6,1873313359),p=h(p,m,n,o,a[d+15],10,-30611744),o=h(o,p,m,n,a[d+6],15,-1560198380),n=h(n,o,p,m,a[d+13],21,1309151649),m=h(m,n,o,p,a[d+4],6,-145523070),p=h(p,m,n,o,a[d+11],10,-1120210379),o=h(o,p,m,n,a[d+2],15,718787259),n=h(n,o,p,m,a[d+9],21,-343485551),m=b(m,i),n=b(n,j),o=b(o,k),p=b(p,l);return[m,n,o,p]}function j(a){var b,c="";for(b=0;b<32*a.length;b+=8)c+=String.fromCharCode(a[b>>5]>>>b%32&255);return c}function k(a){var b,c=[];for(c[(a.length>>2)-1]=void 0,b=0;b<c.length;b+=1)c[b]=0;for(b=0;b<8*a.length;b+=8)c[b>>5]|=(255&a.charCodeAt(b/8))<<b%32;return c}function l(a){return j(i(k(a),8*a.length))}function m(a,b){var c,d,e=k(a),f=[],g=[];for(f[15]=g[15]=void 0,e.length>16&&(e=i(e,8*a.length)),c=0;16>c;c+=1)f[c]=909522486^e[c],g[c]=1549556828^e[c];return d=i(f.concat(k(b)),512+8*b.length),j(i(g.concat(d),640))}function n(a){var b,c,d="0123456789abcdef",e="";for(c=0;c<a.length;c+=1)b=a.charCodeAt(c),e+=d.charAt(b>>>4&15)+d.charAt(15&b);return e}function o(a){return unescape(encodeURIComponent(a))}function p(a){return l(o(a))}function 
q(a){return n(p(a))}function r(a,b){return m(o(a),o(b))}function s(a,b){return n(r(a,b))}function t(a,b,c){return b?c?r(b,a):s(b,a):c?p(a):q(a)}"function"==typeof define&&define.amd?define(function(){return t}):a.md5=t}(this);
|
||||
|
||||
async function handleRequest(request) {
|
||||
const origin = request.headers.get("origin");
|
||||
const url = new URL(request.url);
|
||||
const path = decodeURI(url.pathname);
|
||||
const sign = url.searchParams.get("sign");
|
||||
const name = path.split("/").pop();
|
||||
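// Expected signature: the 16 hex characters md5("alist-" + TOKEN + "-" + name).slice(8, 24); it must match the "sign" query parameter.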
const right = md5(`alist-${TOKEN}-${name}`).slice(8, 24);
|
||||
if (sign !== right){
|
||||
const resp = new Response(
|
||||
JSON.stringify({
|
||||
code: 401,
|
||||
message: `sign mismatch`,
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
"content-type": "application/json;charset=UTF-8",
|
||||
},
|
||||
}
|
||||
);
|
||||
resp.headers.set("Access-Control-Allow-Origin", origin);
|
||||
return resp;
|
||||
}
|
||||
|
||||
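// Resolve the real download URL for this path through the AList admin link API.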
let resp = await fetch(`${HOST}/api/admin/link`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"content-type": "application/json;charset=UTF-8",
|
||||
Authorization: TOKEN,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
path: path,
|
||||
}),
|
||||
});
|
||||
let res = await resp.json();
|
||||
if (res.code !== 200) {
|
||||
return new Response(JSON.stringify(res));
|
||||
}
|
||||
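// Replay the original request against the resolved direct URL, forwarding any extra headers the driver requires.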
request = new Request(res.data.url, request);
|
||||
if (res.data.headers) {
|
||||
for(const header of res.data.headers){
|
||||
request.headers.set(header.name, header.value);
|
||||
}
|
||||
}
|
||||
let response = await fetch(request);
|
||||
|
||||
// Recreate the response so we can modify the headers
|
||||
response = new Response(response.body, response);
|
||||
|
||||
// Set CORS headers
|
||||
response.headers.set("Access-Control-Allow-Origin", origin);
|
||||
|
||||
// Append to/Add Vary header so browser will cache response correctly
|
||||
response.headers.append("Vary", "Origin");
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
function handleOptions(request) {
|
||||
// Make sure the necessary headers are present
|
||||
// for this to be a valid pre-flight request
|
||||
let headers = request.headers;
|
||||
if (
|
||||
headers.get("Origin") !== null &&
|
||||
headers.get("Access-Control-Request-Method") !== null
|
||||
// && headers.get("Access-Control-Request-Headers") !== null
|
||||
) {
|
||||
// Handle CORS pre-flight request.
|
||||
// If you want to check or reject the requested method + headers
|
||||
// you can do that here.
|
||||
let respHeaders = {
|
||||
...corsHeaders,
|
||||
// Allow all future content Request headers to go back to browser
|
||||
// such as Authorization (Bearer) or X-Client-Name-Version
|
||||
"Access-Control-Allow-Headers": request.headers.get(
|
||||
"Access-Control-Request-Headers"
|
||||
),
|
||||
};
|
||||
|
||||
return new Response(null, {
|
||||
headers: respHeaders,
|
||||
});
|
||||
} else {
|
||||
// Handle standard OPTIONS request.
|
||||
// If you want to allow other HTTP Methods, you can do that here.
|
||||
return new Response(null, {
|
||||
headers: {
|
||||
Allow: "GET, HEAD, POST, OPTIONS",
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
addEventListener("fetch", (event) => {
|
||||
const request = event.request;
|
||||
// const url = new URL(request.url)
|
||||
if (request.method === "OPTIONS") {
|
||||
// Handle CORS preflight requests
|
||||
event.respondWith(handleOptions(request));
|
||||
} else if (
|
||||
request.method === "GET" ||
|
||||
request.method === "HEAD" ||
|
||||
request.method === "POST"
|
||||
) {
|
||||
// Handle requests to the API server
|
||||
event.respondWith(handleRequest(request));
|
||||
} else {
|
||||
event.respondWith(
|
||||
new Response(null, {
|
||||
status: 405,
|
||||
statusText: "Method Not Allowed",
|
||||
})
|
||||
);
|
||||
}
|
||||
});
|
||||
alist.go (deleted, 58 lines)
@@ -1,58 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Xhofe/alist/bootstrap"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
_ "github.com/Xhofe/alist/drivers"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/server"
|
||||
"github.com/gin-gonic/gin"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
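// Init wires up config, cron, the database model, settings, accounts and the cache; it returns false when startup should stop early (e.g. when only printing the stored password).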
func Init() bool {
|
||||
//bootstrap.InitLog()
|
||||
bootstrap.InitConf()
|
||||
bootstrap.InitCron()
|
||||
bootstrap.InitModel()
|
||||
if conf.Password {
|
||||
pass, err := model.GetSettingByKey("password")
|
||||
if err != nil {
|
||||
log.Errorf(err.Error())
|
||||
return false
|
||||
}
|
||||
log.Infof("current password: %s", pass.Value)
|
||||
return false
|
||||
}
|
||||
bootstrap.InitSettings()
|
||||
bootstrap.InitAccounts()
|
||||
bootstrap.InitCache()
|
||||
return true
|
||||
}
|
||||
|
||||
func main() {
|
||||
if conf.Version {
|
||||
fmt.Printf("Built At: %s\nGo Version: %s\nAuthor: %s\nCommit ID: %s\nVersion: %s\n", conf.BuiltAt, conf.GoVersion, conf.GitAuthor, conf.GitCommit, conf.GitTag)
|
||||
return
|
||||
}
|
||||
if !Init() {
|
||||
return
|
||||
}
|
||||
if !conf.Debug {
|
||||
gin.SetMode(gin.ReleaseMode)
|
||||
}
|
||||
r := gin.Default()
|
||||
server.InitApiRouter(r)
|
||||
base := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
|
||||
log.Infof("start server @ %s", base)
|
||||
var err error
|
||||
if conf.Conf.Https {
|
||||
err = r.RunTLS(base, conf.Conf.CertFile, conf.Conf.KeyFile)
|
||||
} else {
|
||||
err = r.Run(base)
|
||||
}
|
||||
if err != nil {
|
||||
log.Errorf("failed to start: %s", err.Error())
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestUrl(t *testing.T) {
|
||||
s,_ := url.QueryUnescape("/ali/%E7%8C%AA%E5%A4%B4%E7%9A%84%E6%96%87%E4%BB%B6%5B%E5%98%BF%E5%98%BF%5D/%E9%82%B9%E9%82%B9%E7%9A%84%E6%96%87%E4%BB%B6/%E6%A1%8C%E9%9D%A2%E5%A3%81%E7%BA%B8/v2-e8f266ba17ae387eefed1cb22b2b5e4e_r.jpg")
|
||||
fmt.Print(s)
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/drivers/base"
|
||||
"github.com/Xhofe/alist/model"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
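// InitAccounts loads every stored account from the database and re-initializes its driver; per-account failures are logged without aborting startup.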
func InitAccounts() {
|
||||
log.Infof("init accounts...")
|
||||
var accounts []model.Account
|
||||
if err := conf.DB.Find(&accounts).Error; err != nil {
|
||||
log.Fatalf("failed sync init accounts")
|
||||
}
|
||||
for i, account := range accounts {
|
||||
model.RegisterAccount(account)
|
||||
driver, ok := base.GetDriver(account.Type)
|
||||
if !ok {
|
||||
log.Errorf("no [%s] driver", account.Type)
|
||||
} else {
|
||||
err := driver.Save(&accounts[i], nil)
|
||||
if err != nil {
|
||||
log.Errorf("init account [%s] error:[%s]", account.Name, err.Error())
|
||||
} else {
|
||||
log.Infof("success init account: %s, type: %s", account.Name, account.Type)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/eko/gocache/v2/cache"
|
||||
"github.com/eko/gocache/v2/store"
|
||||
goCache "github.com/patrickmn/go-cache"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"time"
|
||||
)
|
||||
|
||||
// InitCache init cache
|
||||
func InitCache() {
|
||||
log.Infof("init cache...")
|
||||
goCacheClient := goCache.New(60*time.Minute, 120*time.Minute)
|
||||
goCacheStore := store.NewGoCache(goCacheClient, nil)
|
||||
conf.Cache = cache.New(goCacheStore)
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"io/ioutil"
|
||||
)
|
||||
|
||||
// InitConf init config
|
||||
func InitConf() {
|
||||
log.Infof("reading config file: %s", conf.ConfigFile)
|
||||
if !utils.Exists(conf.ConfigFile) {
|
||||
log.Infof("config file not exists, creating default config file")
|
||||
_, err := utils.CreatNestedFile(conf.ConfigFile)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to create config file")
|
||||
}
|
||||
conf.Conf = conf.DefaultConfig()
|
||||
if !utils.WriteToJson(conf.ConfigFile, conf.Conf) {
|
||||
log.Fatalf("failed to create default config file")
|
||||
}
|
||||
return
|
||||
}
|
||||
config, err := ioutil.ReadFile(conf.ConfigFile)
|
||||
if err != nil {
|
||||
log.Fatalf("reading config file error:%s", err.Error())
|
||||
}
|
||||
conf.Conf = new(conf.Config)
|
||||
err = json.Unmarshal(config, conf.Conf)
|
||||
if err != nil {
|
||||
log.Fatalf("load config error: %s", err.Error())
|
||||
}
|
||||
log.Debugf("config:%+v", conf.Conf)
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/robfig/cron/v3"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// InitCron init cron
|
||||
func InitCron() {
|
||||
log.Infof("init cron...")
|
||||
conf.Cron = cron.New()
|
||||
conf.Cron.Start()
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// InitLog init log
|
||||
func InitLog() {
|
||||
if conf.Debug {
|
||||
log.SetLevel(log.DebugLevel)
|
||||
log.SetReportCaller(true)
|
||||
}
|
||||
log.SetFormatter(&log.TextFormatter{
|
||||
//DisableColors: true,
|
||||
ForceColors: true,
|
||||
EnvironmentOverrideColors: true,
|
||||
TimestampFormat: "2006-01-02 15:04:05",
|
||||
FullTimestamp: true,
|
||||
})
|
||||
log.Infof("init log...")
|
||||
}
|
||||
|
||||
func init() {
|
||||
flag.StringVar(&conf.ConfigFile, "conf", "data/config.json", "config file")
|
||||
flag.BoolVar(&conf.Debug, "debug", false, "start with debug mode")
|
||||
flag.BoolVar(&conf.Version, "version", false, "print version info")
|
||||
flag.BoolVar(&conf.Password, "password", false, "print current password")
|
||||
flag.Parse()
|
||||
InitLog()
|
||||
}
|
||||
@@ -1,80 +0,0 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/model"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"gorm.io/driver/mysql"
|
||||
"gorm.io/driver/postgres"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/logger"
|
||||
"gorm.io/gorm/schema"
|
||||
log2 "log"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
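// InitModel opens the configured database (sqlite3, mysql or postgres) via GORM and auto-migrates the SettingItem, Account and Meta tables.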
func InitModel() {
|
||||
log.Infof("init model...")
|
||||
databaseConfig := conf.Conf.Database
|
||||
newLogger := logger.New(
|
||||
log2.New(os.Stdout, "\r\n", log2.LstdFlags),
|
||||
logger.Config{
|
||||
SlowThreshold: time.Second,
|
||||
LogLevel: logger.Silent,
|
||||
IgnoreRecordNotFoundError: true,
|
||||
Colorful: true,
|
||||
},
|
||||
)
|
||||
gormConfig := &gorm.Config{
|
||||
NamingStrategy: schema.NamingStrategy{
|
||||
TablePrefix: databaseConfig.TablePrefix,
|
||||
},
|
||||
Logger: newLogger,
|
||||
}
|
||||
switch databaseConfig.Type {
|
||||
case "sqlite3":
|
||||
{
|
||||
if !(strings.HasSuffix(databaseConfig.DBFile, ".db") && len(databaseConfig.DBFile) > 3) {
|
||||
log.Fatalf("db name error.")
|
||||
}
|
||||
db, err := gorm.Open(sqlite.Open(databaseConfig.DBFile), gormConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to connect database:%s", err.Error())
|
||||
}
|
||||
conf.DB = db
|
||||
}
|
||||
case "mysql":
|
||||
{
|
||||
dsn := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?charset=utf8mb4&parseTime=True&loc=Local",
|
||||
databaseConfig.User, databaseConfig.Password, databaseConfig.Host, databaseConfig.Port, databaseConfig.Name)
|
||||
db, err := gorm.Open(mysql.Open(dsn), gormConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to connect database:%s", err.Error())
|
||||
}
|
||||
conf.DB = db
|
||||
}
|
||||
case "postgres":
|
||||
{
|
||||
dsn := fmt.Sprintf("host=%s user=%s password=%s dbname=%s port=%d sslmode=disable TimeZone=Asia/Shanghai",
|
||||
databaseConfig.Host, databaseConfig.User, databaseConfig.Password, databaseConfig.Name, databaseConfig.Port)
|
||||
db, err := gorm.Open(postgres.Open(dsn), gormConfig)
|
||||
if err != nil {
|
||||
log.Errorf("failed to connect database:%s", err.Error())
|
||||
}
|
||||
conf.DB = db
|
||||
|
||||
}
|
||||
default:
|
||||
log.Fatalf("not supported database type: %s", databaseConfig.Type)
|
||||
}
|
||||
log.Infof("auto migrate model...")
|
||||
err := conf.DB.AutoMigrate(&model.SettingItem{}, &model.Account{}, &model.Meta{})
|
||||
if err != nil {
|
||||
log.Fatalf("failed to auto migrate")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,202 +0,0 @@
|
||||
package bootstrap
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/model"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
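// InitSettings seeds the default setting items (title, password, logo, WebDAV credentials, ...) and stamps each one with the current git tag as its version.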
func InitSettings() {
|
||||
log.Infof("init settings...")
|
||||
version := model.SettingItem{
|
||||
Key: "version",
|
||||
Value: conf.GitTag,
|
||||
Description: "version",
|
||||
Type: "string",
|
||||
Group: model.CONST,
|
||||
Version: conf.GitTag,
|
||||
}
|
||||
|
||||
err := model.SaveSetting(version)
|
||||
if err != nil {
|
||||
log.Fatalf("failed write setting: %s", err.Error())
|
||||
}
|
||||
|
||||
settings := []model.SettingItem{
|
||||
{
|
||||
Key: "title",
|
||||
Value: "Alist",
|
||||
Description: "title",
|
||||
Type: "string",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "password",
|
||||
Value: "alist",
|
||||
Description: "password",
|
||||
Type: "string",
|
||||
Group: model.PRIVATE,
|
||||
},
|
||||
{
|
||||
Key: "logo",
|
||||
Value: "https://store.heytapimage.com/cdo-portal/feedback/202112/05/1542f45f86b8609495b69c5380753135.png",
|
||||
Description: "logo",
|
||||
Type: "string",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "favicon",
|
||||
Value: "https://store.heytapimage.com/cdo-portal/feedback/202112/05/1542f45f86b8609495b69c5380753135.png",
|
||||
Description: "favicon",
|
||||
Type: "string",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "icon color",
|
||||
Value: "teal.300",
|
||||
Description: "icon's color",
|
||||
Type: "string",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "text types",
|
||||
Value: "txt,htm,html,xml,java,properties,sql,js,md,json,conf,ini,vue,php,py,bat,gitignore,yml,go,sh,c,cpp,h,hpp,tsx",
|
||||
Type: "string",
|
||||
Description: "text type extensions",
|
||||
},
|
||||
{
|
||||
Key: "hide readme file",
|
||||
Value: "true",
|
||||
Type: "bool",
|
||||
Description: "hide readme file? ",
|
||||
},
|
||||
{
|
||||
Key: "music cover",
|
||||
Value: "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png",
|
||||
Description: "music cover image",
|
||||
Type: "string",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "site beian",
|
||||
Description: "chinese beian info",
|
||||
Type: "string",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "home readme url",
|
||||
Description: "when have multiple, the readme file to show",
|
||||
Type: "string",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "markdown theme",
|
||||
Value: "vuepress",
|
||||
Description: "default | github | vuepress",
|
||||
Group: model.PUBLIC,
|
||||
Type: "select",
|
||||
Values: "default,github,vuepress",
|
||||
},
|
||||
{
|
||||
Key: "autoplay video",
|
||||
Value: "false",
|
||||
Type: "bool",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "autoplay audio",
|
||||
Value: "false",
|
||||
Type: "bool",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "check parent folder",
|
||||
Value: "false",
|
||||
Type: "bool",
|
||||
Description: "check parent folder password",
|
||||
Group: model.PRIVATE,
|
||||
},
|
||||
{
|
||||
Key: "customize head",
|
||||
Value: `<style>
|
||||
.chakra-ui-light{
|
||||
background-image: linear-gradient(120deg,#e0c3fc 0%,#8ec5fc 100%) !important;
|
||||
background-attachment: fixed;
|
||||
}
|
||||
.main-box {
|
||||
border-radius: 15px !important;
|
||||
}
|
||||
.chakra-ui-light .main-box {
|
||||
background-color: white !important;
|
||||
}
|
||||
.chakra-ui-light .readme-box {
|
||||
background-color: white !important;
|
||||
}
|
||||
.readme-box {
|
||||
border-radius: 15px !important;
|
||||
}
|
||||
</style>`,
|
||||
Type: "text",
|
||||
Description: "Customize head, placed at the beginning of the head",
|
||||
Group: model.PRIVATE,
|
||||
},
|
||||
{
|
||||
Key: "customize body",
|
||||
Value: "",
|
||||
Type: "text",
|
||||
Description: "Customize script, placed at the end of the body",
|
||||
Group: model.PRIVATE,
|
||||
},
|
||||
{
|
||||
Key: "animation",
|
||||
Value: "true",
|
||||
Type: "bool",
|
||||
Description: "when there are a lot of files, the animation will freeze when opening",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "check down link",
|
||||
Value: "false",
|
||||
Type: "bool",
|
||||
Description: "check down link password, your link will be 'https://alist.com/d/filename?pw=xxx'",
|
||||
Group: model.PUBLIC,
|
||||
},
|
||||
{
|
||||
Key: "WebDAV username",
|
||||
Value: "alist",
|
||||
Description: "WebDAV username",
|
||||
Type: "string",
|
||||
Group: model.PRIVATE,
|
||||
},
|
||||
{
|
||||
Key: "WebDAV password",
|
||||
Value: "alist",
|
||||
Description: "WebDAV password",
|
||||
Type: "string",
|
||||
Group: model.PRIVATE,
|
||||
},
|
||||
}
|
||||
for i, _ := range settings {
|
||||
v := settings[i]
|
||||
v.Version = conf.GitTag
|
||||
o, err := model.GetSettingByKey(v.Key)
|
||||
if err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
err = model.SaveSetting(v)
|
||||
if err != nil {
|
||||
log.Fatalf("failed write setting: %s", err.Error())
|
||||
}
|
||||
} else {
|
||||
log.Fatal("can't get setting: %s", err.Error())
|
||||
}
|
||||
} else {
|
||||
o.Version = conf.GitTag
|
||||
err = model.SaveSetting(*o)
|
||||
if err != nil {
|
||||
log.Fatalf("failed write setting: %s", err.Error())
|
||||
}
|
||||
}
|
||||
}
|
||||
model.LoadSettings()
|
||||
}
|
||||
build.sh (modified, 570 changed lines)
@@ -1,106 +1,486 @@
|
||||
#!/bin/bash
|
||||
if [ "$1" == "web" ]; then
|
||||
git clone https://github.com/Xhofe/alist-web.git
|
||||
cd alist-web || exit
|
||||
yarn
|
||||
yarn build
|
||||
mv dist/* ../public
|
||||
cd ..
|
||||
exit 0
|
||||
fi
|
||||
|
||||
go env -w GOPROXY=https://goproxy.cn,https://mirrors.aliyun.com/goproxy/,https://goproxy.io,direct
|
||||
|
||||
if [ "$1" == "docker" ]; then
|
||||
appName="alist"
|
||||
builtAt="$(date +'%F %T %z')"
|
||||
goVersion=$(go version | sed 's/go version //')
|
||||
gitAuthor=$(git show -s --format='format:%aN <%ae>' HEAD)
|
||||
gitCommit=$(git log --pretty=format:"%h" -1)
|
||||
gitTag=$(git describe --long --tags --dirty --always)
|
||||
ldflags="\
|
||||
-w -s \
|
||||
-X 'github.com/Xhofe/alist/conf.BuiltAt=$builtAt' \
|
||||
-X 'github.com/Xhofe/alist/conf.GoVersion=$goVersion' \
|
||||
-X 'github.com/Xhofe/alist/conf.GitAuthor=$gitAuthor' \
|
||||
-X 'github.com/Xhofe/alist/conf.GitCommit=$gitCommit' \
|
||||
-X 'github.com/Xhofe/alist/conf.GitTag=$gitTag' \
|
||||
"
|
||||
go build -o ./bin/alist -ldflags="$ldflags" alist.go
|
||||
exit 0
|
||||
fi
|
||||
|
||||
cd alist-web || exit
|
||||
webCommit=$(git log --pretty=format:"%h" -1)
|
||||
echo "web commit id: $webCommit"
|
||||
yarn
|
||||
if [ "$1" == "release" ]; then
|
||||
yarn build --base="https://cdn.jsdelivr.net/gh/Xhofe/alist-web@cdn/v2/$webCommit"
|
||||
mv dist/assets ..
|
||||
else
|
||||
yarn build
|
||||
fi
|
||||
cd ..
|
||||
|
||||
cd alist
|
||||
appName="alist"
|
||||
set -e
|
||||
appName="openlist"
|
||||
builtAt="$(date +'%F %T %z')"
|
||||
goVersion=$(go version | sed 's/go version //')
|
||||
gitAuthor=$(git show -s --format='format:%aN <%ae>' HEAD)
|
||||
gitAuthor="The OpenList Projects Contributors <noreply@openlist.team>"
|
||||
gitCommit=$(git log --pretty=format:"%h" -1)
|
||||
gitTag=$(git describe --long --tags --dirty --always)
|
||||
|
||||
echo "build version: $gitTag"
|
||||
githubAuthArgs=""
|
||||
if [ -n "$GITHUB_TOKEN" ]; then
|
||||
githubAuthArgs="--header \"Authorization: Bearer $GITHUB_TOKEN\""
|
||||
fi
|
||||
|
||||
# Check for lite parameter
|
||||
useLite=false
|
||||
if [[ "$*" == *"lite"* ]]; then
|
||||
useLite=true
|
||||
fi
|
||||
|
||||
if [ "$1" = "dev" ]; then
|
||||
version="dev"
|
||||
webVersion="dev"
|
||||
elif [ "$1" = "beta" ]; then
|
||||
version="beta"
|
||||
webVersion="dev"
|
||||
else
|
||||
git tag -d beta || true
|
||||
# Always true if there's no tag
|
||||
version=$(git describe --abbrev=0 --tags 2>/dev/null || echo "v0.0.0")
|
||||
webVersion=$(eval "curl -fsSL --max-time 2 $githubAuthArgs \"https://api.github.com/repos/OpenListTeam/OpenList-Frontend/releases/latest\"" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
|
||||
fi
|
||||
|
||||
echo "backend version: $version"
|
||||
echo "frontend version: $webVersion"
|
||||
if [ "$useLite" = true ]; then
|
||||
echo "using lite frontend"
|
||||
else
|
||||
echo "using standard frontend"
|
||||
fi
|
||||
|
||||
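# -X sets the listed package-level string variables of the conf package (build time, commit, version) at link time.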
ldflags="\
|
||||
-w -s \
|
||||
-X 'github.com/Xhofe/alist/conf.BuiltAt=$builtAt' \
|
||||
-X 'github.com/Xhofe/alist/conf.GoVersion=$goVersion' \
|
||||
-X 'github.com/Xhofe/alist/conf.GitAuthor=$gitAuthor' \
|
||||
-X 'github.com/Xhofe/alist/conf.GitCommit=$gitCommit' \
|
||||
-X 'github.com/Xhofe/alist/conf.GitTag=$gitTag' \
|
||||
-X 'github.com/OpenListTeam/OpenList/v4/internal/conf.BuiltAt=$builtAt' \
|
||||
-X 'github.com/OpenListTeam/OpenList/v4/internal/conf.GitAuthor=$gitAuthor' \
|
||||
-X 'github.com/OpenListTeam/OpenList/v4/internal/conf.GitCommit=$gitCommit' \
|
||||
-X 'github.com/OpenListTeam/OpenList/v4/internal/conf.Version=$version' \
|
||||
-X 'github.com/OpenListTeam/OpenList/v4/internal/conf.WebVersion=$webVersion' \
|
||||
"
|
||||
|
||||
cp -R ../alist-web/dist/* public
|
||||
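# FetchWebDev downloads the newest pre-release of the OpenList frontend dist (falling back to the latest release) and unpacks it into public/dist.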
FetchWebDev() {
|
||||
pre_release_tag=$(eval "curl -fsSL --max-time 2 $githubAuthArgs https://api.github.com/repos/OpenListTeam/OpenList-Frontend/releases" | jq -r 'map(select(.prerelease)) | first | .tag_name')
|
||||
if [ -z "$pre_release_tag" ] || [ "$pre_release_tag" == "null" ]; then
|
||||
# fall back to latest release
|
||||
pre_release_json=$(eval "curl -fsSL --max-time 2 $githubAuthArgs -H \"Accept: application/vnd.github.v3+json\" \"https://api.github.com/repos/OpenListTeam/OpenList-Frontend/releases/latest\"")
|
||||
else
|
||||
pre_release_json=$(eval "curl -fsSL --max-time 2 $githubAuthArgs -H \"Accept: application/vnd.github.v3+json\" \"https://api.github.com/repos/OpenListTeam/OpenList-Frontend/releases/tags/$pre_release_tag\"")
|
||||
fi
|
||||
pre_release_assets=$(echo "$pre_release_json" | jq -r '.assets[].browser_download_url')
|
||||
|
||||
if [ "$useLite" = true ]; then
|
||||
pre_release_tar_url=$(echo "$pre_release_assets" | grep "openlist-frontend-dist-lite" | grep "\.tar\.gz$")
|
||||
else
|
||||
pre_release_tar_url=$(echo "$pre_release_assets" | grep "openlist-frontend-dist" | grep -v "lite" | grep "\.tar\.gz$")
|
||||
fi
|
||||
|
||||
curl -fsSL "$pre_release_tar_url" -o web-dist-dev.tar.gz
|
||||
rm -rf public/dist && mkdir -p public/dist
|
||||
tar -zxvf web-dist-dev.tar.gz -C public/dist
|
||||
rm -rf web-dist-dev.tar.gz
|
||||
}
|
||||
|
||||
if [ "$1" == "release" ]; then
|
||||
xgo -out alist -ldflags="$ldflags" .
|
||||
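# FetchWebRelease downloads the latest stable frontend dist release and unpacks it into public/dist.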
FetchWebRelease() {
|
||||
release_json=$(eval "curl -fsSL --max-time 2 $githubAuthArgs -H \"Accept: application/vnd.github.v3+json\" \"https://api.github.com/repos/OpenListTeam/OpenList-Frontend/releases/latest\"")
|
||||
release_assets=$(echo "$release_json" | jq -r '.assets[].browser_download_url')
|
||||
|
||||
if [ "$useLite" = true ]; then
|
||||
release_tar_url=$(echo "$release_assets" | grep "openlist-frontend-dist-lite" | grep "\.tar\.gz$")
|
||||
else
|
||||
release_tar_url=$(echo "$release_assets" | grep "openlist-frontend-dist" | grep -v "lite" | grep "\.tar\.gz$")
|
||||
fi
|
||||
|
||||
curl -fsSL "$release_tar_url" -o dist.tar.gz
|
||||
rm -rf public/dist && mkdir -p public/dist
|
||||
tar -zxvf dist.tar.gz -C public/dist
|
||||
rm -rf dist.tar.gz
|
||||
}
|
||||
|
||||
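# BuildWinArm64 cross-compiles a CGO-enabled windows/arm64 binary using the bundled zcc/zcxx compiler wrappers.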
BuildWinArm64() {
|
||||
echo building for windows-arm64
|
||||
chmod +x ./wrapper/zcc-arm64
|
||||
chmod +x ./wrapper/zcxx-arm64
|
||||
export GOOS=windows
|
||||
export GOARCH=arm64
|
||||
export CC=$(pwd)/wrapper/zcc-arm64
|
||||
export CXX=$(pwd)/wrapper/zcxx-arm64
|
||||
export CGO_ENABLED=1
|
||||
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
|
||||
}
|
||||
|
||||
BuildDev() {
|
||||
rm -rf .git/
|
||||
mkdir -p "dist"
|
||||
muslflags="--extldflags '-static -fpic' $ldflags"
|
||||
BASE="https://github.com/OpenListTeam/musl-compilers/releases/latest/download/"
|
||||
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross)
|
||||
for i in "${FILES[@]}"; do
|
||||
url="${BASE}${i}.tgz"
|
||||
curl -fsSL -o "${i}.tgz" "${url}"
|
||||
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
|
||||
done
|
||||
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64)
|
||||
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc)
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
echo building for ${os_arch}
|
||||
export GOOS=${os_arch%%-*}
|
||||
export GOARCH=${os_arch##*-}
|
||||
export CC=${cgo_cc}
|
||||
export CGO_ENABLED=1
|
||||
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
||||
done
|
||||
xgo -targets=windows/amd64,darwin/amd64,darwin/arm64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
|
||||
mv "$appName"-* dist
|
||||
cd dist
|
||||
cp ./"$appName"-windows-amd64.exe ./"$appName"-windows-amd64-upx.exe
|
||||
upx -9 ./"$appName"-windows-amd64-upx.exe
|
||||
find . -type f -print0 | xargs -0 md5sum >md5.txt
|
||||
cat md5.txt
|
||||
}
|
||||
|
||||
BuildDocker() {
|
||||
go build -o ./bin/"$appName" -ldflags="$ldflags" -tags=jsoniter .
|
||||
}
|
||||
|
||||
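# PrepareBuildDockerMusl downloads the musl cross-compiler toolchains needed by BuildDockerMultiplatform into build/musl-libs.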
PrepareBuildDockerMusl() {
|
||||
mkdir -p build/musl-libs
|
||||
BASE="https://github.com/OpenListTeam/musl-compilers/releases/latest/download/"
|
||||
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross riscv64-linux-musl-cross powerpc64le-linux-musl-cross)
|
||||
for i in "${FILES[@]}"; do
|
||||
url="${BASE}${i}.tgz"
|
||||
lib_tgz="build/${i}.tgz"
|
||||
curl -fsSL -o "${lib_tgz}" "${url}"
|
||||
tar xf "${lib_tgz}" --strip-components 1 -C build/musl-libs
|
||||
rm -f "${lib_tgz}"
|
||||
done
|
||||
}
|
||||
|
||||
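# BuildDockerMultiplatform builds static musl binaries for every Docker target platform (amd64, arm64, 386, s390x, riscv64, ppc64le, arm v6/v7) under build/<os>/<arch>/.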
BuildDockerMultiplatform() {
|
||||
go mod download
|
||||
|
||||
# run PrepareBuildDockerMusl before build
|
||||
export PATH=$PATH:$PWD/build/musl-libs/bin
|
||||
|
||||
docker_lflags="--extldflags '-static -fpic' $ldflags"
|
||||
export CGO_ENABLED=1
|
||||
|
||||
OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x linux-riscv64 linux-ppc64le)
|
||||
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc riscv64-linux-musl-gcc powerpc64le-linux-musl-gcc)
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
os=${os_arch%%-*}
|
||||
arch=${os_arch##*-}
|
||||
export GOOS=$os
|
||||
export GOARCH=$arch
|
||||
export CC=${cgo_cc}
|
||||
echo "building for $os_arch"
|
||||
go build -o build/$os/$arch/"$appName" -ldflags="$docker_lflags" -tags=jsoniter .
|
||||
done
|
||||
|
||||
DOCKER_ARM_ARCHES=(linux-arm/v6 linux-arm/v7)
|
||||
CGO_ARGS=(armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc)
|
||||
GO_ARM=(6 7)
|
||||
export GOOS=linux
|
||||
export GOARCH=arm
|
||||
for i in "${!DOCKER_ARM_ARCHES[@]}"; do
|
||||
docker_arch=${DOCKER_ARM_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
export GOARM=${GO_ARM[$i]}
|
||||
export CC=${cgo_cc}
|
||||
echo "building for $docker_arch"
|
||||
go build -o build/${docker_arch%%-*}/${docker_arch##*-}/"$appName" -ldflags="$docker_lflags" -tags=jsoniter .
|
||||
done
|
||||
}
|
||||
|
||||
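# BuildRelease cross-compiles the standard release targets with xgo (plus windows/arm64) and adds UPX-compressed variants where they are known to work.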
BuildRelease() {
|
||||
rm -rf .git/
|
||||
mkdir -p "build"
|
||||
BuildWinArm64 ./build/"$appName"-windows-arm64.exe
|
||||
xgo -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
|
||||
# why? Because some target platforms seem to have issues with upx compression
|
||||
upx -9 ./"$appName"-linux-amd64
|
||||
cp ./"$appName"-windows-amd64.exe ./"$appName"-windows-amd64-upx.exe
|
||||
upx -9 ./"$appName"-windows-amd64-upx.exe
|
||||
mv "$appName"-* build
|
||||
}
|
||||
|
||||
BuildReleaseLinuxMusl() {
|
||||
rm -rf .git/
|
||||
mkdir -p "build"
|
||||
muslflags="--extldflags '-static -fpic' $ldflags"
|
||||
BASE="https://github.com/OpenListTeam/musl-compilers/releases/latest/download/"
|
||||
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross mips-linux-musl-cross mips64-linux-musl-cross mips64el-linux-musl-cross mipsel-linux-musl-cross powerpc64le-linux-musl-cross s390x-linux-musl-cross loongarch64-linux-musl-cross)
|
||||
for i in "${FILES[@]}"; do
|
||||
url="${BASE}${i}.tgz"
|
||||
curl -fsSL -o "${i}.tgz" "${url}"
|
||||
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
|
||||
rm -f "${i}.tgz"
|
||||
done
|
||||
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64 linux-musl-mips linux-musl-mips64 linux-musl-mips64le linux-musl-mipsle linux-musl-ppc64le linux-musl-s390x linux-musl-loong64)
|
||||
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc mips-linux-musl-gcc mips64-linux-musl-gcc mips64el-linux-musl-gcc mipsel-linux-musl-gcc powerpc64le-linux-musl-gcc s390x-linux-musl-gcc loongarch64-linux-musl-gcc)
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
echo building for ${os_arch}
|
||||
export GOOS=${os_arch%%-*}
|
||||
export GOARCH=${os_arch##*-}
|
||||
export CC=${cgo_cc}
|
||||
export CGO_ENABLED=1
|
||||
go build -o ./build/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
||||
done
|
||||
}
|
||||
|
||||
BuildReleaseLinuxMuslArm() {
|
||||
rm -rf .git/
|
||||
mkdir -p "build"
|
||||
muslflags="--extldflags '-static -fpic' $ldflags"
|
||||
BASE="https://github.com/OpenListTeam/musl-compilers/releases/latest/download/"
|
||||
FILES=(arm-linux-musleabi-cross arm-linux-musleabihf-cross armel-linux-musleabi-cross armel-linux-musleabihf-cross armv5l-linux-musleabi-cross armv5l-linux-musleabihf-cross armv6-linux-musleabi-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross armv7m-linux-musleabi-cross armv7r-linux-musleabihf-cross)
|
||||
for i in "${FILES[@]}"; do
|
||||
url="${BASE}${i}.tgz"
|
||||
curl -fsSL -o "${i}.tgz" "${url}"
|
||||
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
|
||||
rm -f "${i}.tgz"
|
||||
done
|
||||
OS_ARCHES=(linux-musleabi-arm linux-musleabihf-arm linux-musleabi-armel linux-musleabihf-armel linux-musleabi-armv5l linux-musleabihf-armv5l linux-musleabi-armv6 linux-musleabihf-armv6 linux-musleabihf-armv7l linux-musleabi-armv7m linux-musleabihf-armv7r)
|
||||
CGO_ARGS=(arm-linux-musleabi-gcc arm-linux-musleabihf-gcc armel-linux-musleabi-gcc armel-linux-musleabihf-gcc armv5l-linux-musleabi-gcc armv5l-linux-musleabihf-gcc armv6-linux-musleabi-gcc armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc armv7m-linux-musleabi-gcc armv7r-linux-musleabihf-gcc)
|
||||
GOARMS=('' '' '' '' '5' '5' '6' '6' '7' '7' '7')
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
arm=${GOARMS[$i]}
|
||||
echo building for ${os_arch}
|
||||
export GOOS=linux
|
||||
export GOARCH=arm
|
||||
export CC=${cgo_cc}
|
||||
export CGO_ENABLED=1
|
||||
export GOARM=${arm}
|
||||
go build -o ./build/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
||||
done
|
||||
}
|
||||
|
||||
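# BuildReleaseAndroid builds per-ABI Android binaries with the NDK r26b clang toolchains and strips them with llvm-strip.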
BuildReleaseAndroid() {
|
||||
rm -rf .git/
|
||||
mkdir -p "build"
|
||||
wget https://dl.google.com/android/repository/android-ndk-r26b-linux.zip
|
||||
unzip android-ndk-r26b-linux.zip
|
||||
rm android-ndk-r26b-linux.zip
|
||||
OS_ARCHES=(amd64 arm64 386 arm)
|
||||
CGO_ARGS=(x86_64-linux-android24-clang aarch64-linux-android24-clang i686-linux-android24-clang armv7a-linux-androideabi24-clang)
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=$(realpath android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/${CGO_ARGS[$i]})
|
||||
echo building for android-${os_arch}
|
||||
export GOOS=android
|
||||
export GOARCH=${os_arch##*-}
|
||||
export CC=${cgo_cc}
|
||||
export CGO_ENABLED=1
|
||||
go build -o ./build/$appName-android-$os_arch -ldflags="$ldflags" -tags=jsoniter .
|
||||
android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip ./build/$appName-android-$os_arch
|
||||
done
|
||||
}
|
||||
|
||||
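# BuildReleaseFreeBSD cross-compiles with clang against FreeBSD 14.x sysroots fetched from download.freebsd.org.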
BuildReleaseFreeBSD() {
|
||||
rm -rf .git/
|
||||
mkdir -p "build/freebsd"
|
||||
|
||||
# Get latest FreeBSD 14.x release version from GitHub
|
||||
freebsd_version=$(eval "curl -fsSL --max-time 2 $githubAuthArgs \"https://api.github.com/repos/freebsd/freebsd-src/tags\"" | \
|
||||
jq -r '.[].name' | \
|
||||
grep '^release/14\.' | \
|
||||
sort -V | \
|
||||
tail -1 | \
|
||||
sed 's/release\///' | \
|
||||
sed 's/\.0$//')
|
||||
|
||||
if [ -z "$freebsd_version" ]; then
|
||||
echo "Failed to get FreeBSD version, falling back to 14.3"
|
||||
freebsd_version="14.3"
|
||||
fi
|
||||
|
||||
echo "Using FreeBSD version: $freebsd_version"
|
||||
|
||||
OS_ARCHES=(amd64 arm64 i386)
|
||||
GO_ARCHES=(amd64 arm64 386)
|
||||
CGO_ARGS=(x86_64-unknown-freebsd${freebsd_version} aarch64-unknown-freebsd${freebsd_version} i386-unknown-freebsd${freebsd_version})
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc="clang --target=${CGO_ARGS[$i]} --sysroot=/opt/freebsd/${os_arch}"
|
||||
echo building for freebsd-${os_arch}
|
||||
sudo mkdir -p "/opt/freebsd/${os_arch}"
|
||||
wget -q https://download.freebsd.org/releases/${os_arch}/${freebsd_version}-RELEASE/base.txz
|
||||
sudo tar -xf ./base.txz -C /opt/freebsd/${os_arch}
|
||||
rm base.txz
|
||||
export GOOS=freebsd
|
||||
export GOARCH=${GO_ARCHES[$i]}
|
||||
export CC=${cgo_cc}
|
||||
export CGO_ENABLED=1
|
||||
export CGO_LDFLAGS="-fuse-ld=lld"
|
||||
go build -o ./build/$appName-freebsd-$os_arch -ldflags="$ldflags" -tags=jsoniter .
|
||||
done
|
||||
}
|
||||
|
||||
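# MakeRelease packs the binaries in build/ into tar.gz/zip archives under build/compress and writes the md5 checksum file named by $1 (with a -lite suffix for lite builds).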
MakeRelease() {
|
||||
cd build
|
||||
if [ -d compress ]; then
|
||||
rm -rv compress
|
||||
fi
|
||||
mkdir compress
|
||||
|
||||
# Add -lite suffix if useLite is true
|
||||
liteSuffix=""
|
||||
if [ "$useLite" = true ]; then
|
||||
liteSuffix="-lite"
|
||||
fi
|
||||
|
||||
for i in $(find . -type f -name "$appName-linux-*"); do
|
||||
cp "$i" "$appName"
|
||||
tar -czvf compress/"$i$liteSuffix".tar.gz "$appName"
|
||||
rm -f "$appName"
|
||||
done
|
||||
for i in $(find . -type f -name "$appName-android-*"); do
|
||||
cp "$i" "$appName"
|
||||
tar -czvf compress/"$i$liteSuffix".tar.gz "$appName"
|
||||
rm -f "$appName"
|
||||
done
|
||||
for i in $(find . -type f -name "$appName-darwin-*"); do
|
||||
cp "$i" "$appName"
|
||||
tar -czvf compress/"$i$liteSuffix".tar.gz "$appName"
|
||||
rm -f "$appName"
|
||||
done
|
||||
for i in $(find . -type f -name "$appName-freebsd-*"); do
|
||||
cp "$i" "$appName"
|
||||
tar -czvf compress/"$i$liteSuffix".tar.gz "$appName"
|
||||
rm -f "$appName"
|
||||
done
|
||||
for i in $(find . -type f -name "$appName-windows-*"); do
|
||||
cp "$i" "$appName".exe
|
||||
zip compress/$(echo $i | sed 's/\.[^.]*$//')$liteSuffix.zip "$appName".exe
|
||||
rm -f "$appName".exe
|
||||
done
|
||||
cd compress
|
||||
|
||||
# Handle MD5 filename - add -lite suffix only if not already present
|
||||
md5FileName="$1"
|
||||
if [ "$useLite" = true ] && [[ "$1" != *"-lite.txt" ]]; then
|
||||
md5FileName=$(echo "$1" | sed 's/\.txt$/-lite.txt/')
|
||||
fi
|
||||
|
||||
find . -type f -print0 | xargs -0 md5sum >"$md5FileName"
|
||||
cat "$md5FileName"
|
||||
cd ../..
|
||||
}
|
||||
|
||||
# Parse parameters to handle lite parameter position flexibility
|
||||
buildType=""
|
||||
dockerType=""
|
||||
otherParam=""
|
||||
|
||||
for arg in "$@"; do
|
||||
case $arg in
|
||||
dev|beta|release|zip|prepare)
|
||||
if [ -z "$buildType" ]; then
|
||||
buildType="$arg"
|
||||
fi
|
||||
;;
|
||||
docker|docker-multiplatform|linux_musl_arm|linux_musl|android|freebsd|web)
|
||||
if [ -z "$dockerType" ]; then
|
||||
dockerType="$arg"
|
||||
fi
|
||||
;;
|
||||
lite)
|
||||
# lite parameter is already handled above
|
||||
;;
|
||||
*)
|
||||
if [ -z "$otherParam" ]; then
|
||||
otherParam="$arg"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$buildType" = "dev" ]; then
|
||||
FetchWebDev
|
||||
if [ "$dockerType" = "docker" ]; then
|
||||
BuildDocker
|
||||
elif [ "$dockerType" = "docker-multiplatform" ]; then
|
||||
BuildDockerMultiplatform
|
||||
elif [ "$dockerType" = "web" ]; then
|
||||
echo "web only"
|
||||
else
|
||||
BuildDev
|
||||
fi
|
||||
elif [ "$buildType" = "release" -o "$buildType" = "beta" ]; then
|
||||
if [ "$buildType" = "beta" ]; then
|
||||
FetchWebDev
|
||||
else
|
||||
FetchWebRelease
|
||||
fi
|
||||
if [ "$dockerType" = "docker" ]; then
|
||||
BuildDocker
|
||||
elif [ "$dockerType" = "docker-multiplatform" ]; then
|
||||
BuildDockerMultiplatform
|
||||
elif [ "$dockerType" = "linux_musl_arm" ]; then
|
||||
BuildReleaseLinuxMuslArm
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "md5-linux-musl-arm-lite.txt"
|
||||
else
|
||||
MakeRelease "md5-linux-musl-arm.txt"
|
||||
fi
|
||||
elif [ "$dockerType" = "linux_musl" ]; then
|
||||
BuildReleaseLinuxMusl
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "md5-linux-musl-lite.txt"
|
||||
else
|
||||
MakeRelease "md5-linux-musl.txt"
|
||||
fi
|
||||
elif [ "$dockerType" = "android" ]; then
|
||||
BuildReleaseAndroid
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "md5-android-lite.txt"
|
||||
else
|
||||
MakeRelease "md5-android.txt"
|
||||
fi
|
||||
elif [ "$dockerType" = "freebsd" ]; then
|
||||
BuildReleaseFreeBSD
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "md5-freebsd-lite.txt"
|
||||
else
|
||||
MakeRelease "md5-freebsd.txt"
|
||||
fi
|
||||
elif [ "$dockerType" = "web" ]; then
|
||||
echo "web only"
|
||||
else
|
||||
BuildRelease
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "md5-lite.txt"
|
||||
else
|
||||
MakeRelease "md5.txt"
|
||||
fi
|
||||
fi
|
||||
elif [ "$buildType" = "prepare" ]; then
|
||||
if [ "$dockerType" = "docker-multiplatform" ]; then
|
||||
PrepareBuildDockerMusl
|
||||
fi
|
||||
elif [ "$buildType" = "zip" ]; then
|
||||
if [ -n "$otherParam" ]; then
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "$otherParam-lite.txt"
|
||||
else
|
||||
MakeRelease "$otherParam.txt"
|
||||
fi
|
||||
elif [ -n "$dockerType" ]; then
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "$dockerType-lite.txt"
|
||||
else
|
||||
MakeRelease "$dockerType.txt"
|
||||
fi
|
||||
else
|
||||
if [ "$useLite" = true ]; then
|
||||
MakeRelease "md5-lite.txt"
|
||||
else
|
||||
MakeRelease "md5.txt"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
xgo -targets=linux/amd64,windows/amd64 -out alist -ldflags="$ldflags" .
|
||||
echo -e "Parameter error"
|
||||
echo -e "Usage: $0 {dev|beta|release|zip|prepare} [docker|docker-multiplatform|linux_musl_arm|linux_musl|android|freebsd|web] [lite] [other_params]"
|
||||
echo -e "Examples:"
|
||||
echo -e " $0 dev"
|
||||
echo -e " $0 dev lite"
|
||||
echo -e " $0 dev docker"
|
||||
echo -e " $0 dev docker lite"
|
||||
echo -e " $0 release"
|
||||
echo -e " $0 release lite"
|
||||
echo -e " $0 release docker lite"
|
||||
fi
|
||||
mkdir "build"
|
||||
mv alist-* build
|
||||
cd build || exit
|
||||
upx -9 ./*
|
||||
find . -type f -print0 | xargs -0 md5sum > md5.txt
|
||||
cat md5.txt
|
||||
# compress file (release)
|
||||
if [ "$1" == "release" ]; then
|
||||
mkdir compress
|
||||
mv md5.txt compress
|
||||
for i in `find . -type f -name "$appName-linux-*"`
|
||||
do
|
||||
tar -czvf compress/"$i".tar.gz "$i"
|
||||
done
|
||||
for i in `find . -type f -name "$appName-darwin-*"`
|
||||
do
|
||||
tar -czvf compress/"$i".tar.gz "$i"
|
||||
done
|
||||
for i in `find . -type f -name "$appName-windows-*"`
|
||||
do
|
||||
zip compress/$(echo $i | sed 's/\.[^.]*$//').zip "$i"
|
||||
done
|
||||
fi
|
||||
cd ../..
|
||||
|
||||
if [ "$1" == "release" ]; then
|
||||
cd alist-web
|
||||
git checkout cdn
|
||||
mkdir "v2/$webCommit"
|
||||
mv ../assets/ v2/$webCommit
|
||||
git add .
|
||||
git config --local user.email "i@nn.ci"
|
||||
git config --local user.name "Xhofe"
|
||||
git commit --allow-empty -m "upload $webCommit assets files" -a
|
||||
cd ..
|
||||
fi
|
||||
cmd/admin.go (new file, 100 lines)
@@ -0,0 +1,100 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd

import (
	"github.com/OpenListTeam/OpenList/v4/internal/conf"
	"github.com/OpenListTeam/OpenList/v4/internal/op"
	"github.com/OpenListTeam/OpenList/v4/internal/setting"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils/random"
	"github.com/spf13/cobra"
)

// AdminCmd represents the password command
var AdminCmd = &cobra.Command{
	Use:     "admin",
	Aliases: []string{"password"},
	Short:   "Show admin user's info and some operations about admin user's password",
	Run: func(cmd *cobra.Command, args []string) {
		Init()
		defer Release()
		admin, err := op.GetAdmin()
		if err != nil {
			utils.Log.Errorf("failed get admin user: %+v", err)
		} else {
			utils.Log.Infof("Admin user's username: %s", admin.Username)
			utils.Log.Infof("The password can only be output at the first startup, and then stored as a hash value, which cannot be reversed")
			utils.Log.Infof("You can reset the password with a random string by running [openlist admin random]")
			utils.Log.Infof("You can also set a new password by running [openlist admin set NEW_PASSWORD]")
		}
	},
}

var RandomPasswordCmd = &cobra.Command{
	Use:   "random",
	Short: "Reset admin user's password to a random string",
	Run: func(cmd *cobra.Command, args []string) {
		newPwd := random.String(8)
		setAdminPassword(newPwd)
	},
}

var SetPasswordCmd = &cobra.Command{
	Use:   "set",
	Short: "Set admin user's password",
	Run: func(cmd *cobra.Command, args []string) {
		if len(args) == 0 {
			utils.Log.Errorf("Please enter the new password")
			return
		}
		setAdminPassword(args[0])
	},
}

var ShowTokenCmd = &cobra.Command{
	Use:   "token",
	Short: "Show admin token",
	Run: func(cmd *cobra.Command, args []string) {
		Init()
		defer Release()
		token := setting.GetStr(conf.Token)
		utils.Log.Infof("Admin token: %s", token)
	},
}

func setAdminPassword(pwd string) {
	Init()
	defer Release()
	admin, err := op.GetAdmin()
	if err != nil {
		utils.Log.Errorf("failed get admin user: %+v", err)
		return
	}
	admin.SetPassword(pwd)
	if err := op.UpdateUser(admin); err != nil {
		utils.Log.Errorf("failed update admin user: %+v", err)
		return
	}
	utils.Log.Infof("admin user has been updated:")
	utils.Log.Infof("username: %s", admin.Username)
	utils.Log.Infof("password: %s", pwd)
	DelAdminCacheOnline()
}

func init() {
	RootCmd.AddCommand(AdminCmd)
	AdminCmd.AddCommand(RandomPasswordCmd)
	AdminCmd.AddCommand(SetPasswordCmd)
	AdminCmd.AddCommand(ShowTokenCmd)
	// Here you will define your flags and configuration settings.

	// Cobra supports Persistent Flags which will work for this command
	// and all subcommands, e.g.:
	// passwordCmd.PersistentFlags().String("foo", "", "A help for foo")

	// Cobra supports local flags which will only run when this command
	// is called directly, e.g.:
	// passwordCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
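For reference, the subcommands registered above can be exercised from a shell roughly as follows (a minimal sketch; only the `openlist` binary name and the subcommands/arguments defined in this file are assumed):

# show the admin username and password hints
./openlist admin

# reset the admin password to a random 8-character string
./openlist admin random

# set an explicit admin password
./openlist admin set NEW_PASSWORD

# print the admin token
./openlist admin token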
46
cmd/cancel2FA.go
Normal file
@@ -0,0 +1,46 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd

import (
	"github.com/OpenListTeam/OpenList/v4/internal/op"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	"github.com/spf13/cobra"
)

// Cancel2FACmd represents the delete2fa command
var Cancel2FACmd = &cobra.Command{
	Use:   "cancel2fa",
	Short: "Delete 2FA of admin user",
	Run: func(cmd *cobra.Command, args []string) {
		Init()
		defer Release()
		admin, err := op.GetAdmin()
		if err != nil {
			utils.Log.Errorf("failed to get admin user: %+v", err)
		} else {
			err := op.Cancel2FAByUser(admin)
			if err != nil {
				utils.Log.Errorf("failed to cancel 2FA: %+v", err)
			} else {
				utils.Log.Info("2FA canceled")
				DelAdminCacheOnline()
			}
		}
	},
}

func init() {
	RootCmd.AddCommand(Cancel2FACmd)

	// Here you will define your flags and configuration settings.

	// Cobra supports Persistent Flags which will work for this command
	// and all subcommands, e.g.:
	// cancel2FACmd.PersistentFlags().String("foo", "", "A help for foo")

	// Cobra supports local flags which will only run when this command
	// is called directly, e.g.:
	// cancel2FACmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
51
cmd/common.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/bootstrap"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/bootstrap/data"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/db"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func Init() {
|
||||
bootstrap.InitConfig()
|
||||
bootstrap.Log()
|
||||
bootstrap.InitDB()
|
||||
data.InitData()
|
||||
bootstrap.InitStreamLimit()
|
||||
bootstrap.InitIndex()
|
||||
bootstrap.InitUpgradePatch()
|
||||
}
|
||||
|
||||
func Release() {
|
||||
db.Close()
|
||||
}
|
||||
|
||||
var pid = -1
|
||||
var pidFile string
|
||||
|
||||
func initDaemon() {
|
||||
ex, err := os.Executable()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
exPath := filepath.Dir(ex)
|
||||
_ = os.MkdirAll(filepath.Join(exPath, "daemon"), 0700)
|
||||
pidFile = filepath.Join(exPath, "daemon/pid")
|
||||
if utils.Exists(pidFile) {
|
||||
bytes, err := os.ReadFile(pidFile)
|
||||
if err != nil {
|
||||
log.Fatal("failed to read pid file", err)
|
||||
}
|
||||
id, err := strconv.Atoi(string(bytes))
|
||||
if err != nil {
|
||||
log.Fatal("failed to parse pid data", err)
|
||||
}
|
||||
pid = id
|
||||
}
|
||||
}
|
||||
311
cmd/crypt.go
Normal file
@@ -0,0 +1,311 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
rcCrypt "github.com/rclone/rclone/backend/crypt"
|
||||
"github.com/rclone/rclone/fs/config/configmap"
|
||||
"github.com/rclone/rclone/fs/config/obscure"
|
||||
)
|
||||
|
||||
// encryption and decryption command format for Crypt driver
|
||||
|
||||
type options struct {
|
||||
op string //decrypt or encrypt
|
||||
src string //source dir or file
|
||||
dst string //out destination
|
||||
|
||||
pwd string //de/encrypt password
|
||||
salt string
|
||||
filenameEncryption string //reference drivers\crypt\meta.go Addtion
|
||||
dirnameEncryption string
|
||||
filenameEncode string
|
||||
suffix string
|
||||
}
|
||||
|
||||
var opt options
|
||||
|
||||
// CryptCmd represents the crypt command
|
||||
var CryptCmd = &cobra.Command{
|
||||
Use: "crypt",
|
||||
Short: "Encrypt or decrypt local file or dir",
|
||||
Example: `openlist crypt -s ./src/encrypt/ --op=de --pwd=123456 --salt=345678`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
opt.validate()
|
||||
opt.cryptFileDir()
|
||||
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(CryptCmd)
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// versionCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
CryptCmd.Flags().StringVarP(&opt.src, "src", "s", "", "src file or dir to encrypt/decrypt")
|
||||
CryptCmd.Flags().StringVarP(&opt.dst, "dst", "d", "", "dst dir to output,if not set,output to src dir")
|
||||
CryptCmd.Flags().StringVar(&opt.op, "op", "", "de or en which stands for decrypt or encrypt")
|
||||
|
||||
CryptCmd.Flags().StringVar(&opt.pwd, "pwd", "", "password used to encrypt/decrypt,if not contain ___Obfuscated___ prefix,will be obfuscated before used")
|
||||
CryptCmd.Flags().StringVar(&opt.salt, "salt", "", "salt used to encrypt/decrypt,if not contain ___Obfuscated___ prefix,will be obfuscated before used")
|
||||
CryptCmd.Flags().StringVar(&opt.filenameEncryption, "filename-encrypt", "off", "filename encryption mode: off,standard,obfuscate")
|
||||
CryptCmd.Flags().StringVar(&opt.dirnameEncryption, "dirname-encrypt", "false", "is dirname encryption enabled:true,false")
|
||||
CryptCmd.Flags().StringVar(&opt.filenameEncode, "filename-encode", "base64", "filename encoding mode: base64,base32,base32768")
|
||||
CryptCmd.Flags().StringVar(&opt.suffix, "suffix", ".bin", "suffix for encrypted file,default is .bin")
|
||||
}
|
||||
|
||||
func (o *options) validate() {
|
||||
if o.src == "" {
|
||||
log.Fatal("src can not be empty")
|
||||
}
|
||||
if o.op != "encrypt" && o.op != "decrypt" && o.op != "en" && o.op != "de" {
|
||||
log.Fatal("op must be encrypt or decrypt")
|
||||
}
|
||||
if o.filenameEncryption != "off" && o.filenameEncryption != "standard" && o.filenameEncryption != "obfuscate" {
|
||||
log.Fatal("filename_encryption must be off,standard,obfuscate")
|
||||
}
|
||||
if o.filenameEncode != "base64" && o.filenameEncode != "base32" && o.filenameEncode != "base32768" {
|
||||
log.Fatal("filename_encode must be base64,base32,base32768")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (o *options) cryptFileDir() {
|
||||
src, _ := filepath.Abs(o.src)
|
||||
log.Infof("src abs is %v", src)
|
||||
|
||||
fileInfo, err := os.Stat(src)
|
||||
if err != nil {
|
||||
log.Fatalf("reading file/dir %v failed,err:%v", src, err)
|
||||
|
||||
}
|
||||
pwd := updateObfusParm(o.pwd)
|
||||
salt := updateObfusParm(o.salt)
|
||||
|
||||
//create cipher
|
||||
config := configmap.Simple{
|
||||
"password": pwd,
|
||||
"password2": salt,
|
||||
"filename_encryption": o.filenameEncryption,
|
||||
"directory_name_encryption": o.dirnameEncryption,
|
||||
"filename_encoding": o.filenameEncode,
|
||||
"suffix": o.suffix,
|
||||
"pass_bad_blocks": "",
|
||||
}
|
||||
log.Infof("config:%v", config)
|
||||
cipher, err := rcCrypt.NewCipher(config)
|
||||
if err != nil {
|
||||
log.Fatalf("create cipher failed,err:%v", err)
|
||||
|
||||
}
|
||||
dst := ""
|
||||
//check and create dst dir
|
||||
if o.dst != "" {
|
||||
dst, _ = filepath.Abs(o.dst)
|
||||
checkCreateDir(dst)
|
||||
}
|
||||
|
||||
// src is file
|
||||
if !fileInfo.IsDir() { //file
|
||||
if dst == "" {
|
||||
dst = filepath.Dir(src)
|
||||
}
|
||||
o.cryptFile(cipher, src, dst)
|
||||
return
|
||||
}
|
||||
|
||||
// src is dir
|
||||
if dst == "" {
|
||||
//if src is dir and not set dst dir ,create ${src}_crypt dir as dst dir
|
||||
dst = path.Join(filepath.Dir(src), fileInfo.Name()+"_crypt")
|
||||
}
|
||||
log.Infof("dst : %v", dst)
|
||||
|
||||
dirnameMap := make(map[string]string)
|
||||
pathSeparator := string(os.PathSeparator)
|
||||
|
||||
filepath.Walk(src, func(p string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
log.Errorf("get file %v info failed, err:%v", p, err)
|
||||
return err
|
||||
}
|
||||
if p == src {
|
||||
return nil
|
||||
}
|
||||
log.Infof("current path %v", p)
|
||||
|
||||
// relative path
|
||||
rp := strings.ReplaceAll(p, src, "")
|
||||
log.Infof("relative path %v", rp)
|
||||
|
||||
rpds := strings.Split(rp, pathSeparator)
|
||||
|
||||
if info.IsDir() {
|
||||
// absolute dst dir for current path
|
||||
dd := ""
|
||||
|
||||
if o.dirnameEncryption == "true" {
|
||||
if o.op == "encrypt" || o.op == "en" {
|
||||
for i := range rpds {
|
||||
oname := rpds[i]
|
||||
if _, ok := dirnameMap[rpds[i]]; ok {
|
||||
rpds[i] = dirnameMap[rpds[i]]
|
||||
} else {
|
||||
rpds[i] = cipher.EncryptDirName(rpds[i])
|
||||
dirnameMap[oname] = rpds[i]
|
||||
}
|
||||
}
|
||||
dd = path.Join(dst, strings.Join(rpds, pathSeparator))
|
||||
} else {
|
||||
for i := range rpds {
|
||||
oname := rpds[i]
|
||||
if _, ok := dirnameMap[rpds[i]]; ok {
|
||||
rpds[i] = dirnameMap[rpds[i]]
|
||||
} else {
|
||||
dnn, err := cipher.DecryptDirName(rpds[i])
|
||||
if err != nil {
|
||||
log.Fatalf("decrypt dir name %v failed,err:%v", rpds[i], err)
|
||||
}
|
||||
rpds[i] = dnn
|
||||
dirnameMap[oname] = dnn
|
||||
}
|
||||
|
||||
}
|
||||
dd = path.Join(dst, strings.Join(rpds, pathSeparator))
|
||||
}
|
||||
|
||||
} else {
|
||||
dd = path.Join(dst, rp)
|
||||
}
|
||||
|
||||
log.Infof("create output dir %v", dd)
|
||||
checkCreateDir(dd)
|
||||
return nil
|
||||
}
|
||||
|
||||
// file dst dir
|
||||
fdd := dst
|
||||
|
||||
if o.dirnameEncryption == "true" {
|
||||
for i := range rpds {
|
||||
if i == len(rpds)-1 {
|
||||
break
|
||||
}
|
||||
fdd = path.Join(fdd, dirnameMap[rpds[i]])
|
||||
}
|
||||
|
||||
} else {
|
||||
fdd = path.Join(fdd, strings.Join(rpds[:len(rpds)-1], pathSeparator))
|
||||
}
|
||||
|
||||
log.Infof("file output dir %v", fdd)
|
||||
o.cryptFile(cipher, p, fdd)
|
||||
return nil
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
func (o *options) cryptFile(cipher *rcCrypt.Cipher, src string, dst string) {
|
||||
fileInfo, err := os.Stat(src)
|
||||
if err != nil {
|
||||
log.Fatalf("get file %v info failed,err:%v", src, err)
|
||||
|
||||
}
|
||||
fd, err := os.OpenFile(src, os.O_RDWR, 0666)
|
||||
if err != nil {
|
||||
log.Fatalf("open file %v failed,err:%v", src, err)
|
||||
|
||||
}
|
||||
defer fd.Close()
|
||||
|
||||
var cryptSrcReader io.Reader
|
||||
var outFile string
|
||||
if o.op == "encrypt" || o.op == "en" {
|
||||
filename := fileInfo.Name()
|
||||
if o.filenameEncryption != "off" {
|
||||
filename = cipher.EncryptFileName(fileInfo.Name())
|
||||
log.Infof("encrypt file name %v to %v", fileInfo.Name(), filename)
|
||||
} else {
|
||||
filename = fileInfo.Name() + o.suffix
|
||||
}
|
||||
cryptSrcReader, err = cipher.EncryptData(fd)
|
||||
if err != nil {
|
||||
log.Fatalf("encrypt file %v failed,err:%v", src, err)
|
||||
|
||||
}
|
||||
outFile = path.Join(dst, filename)
|
||||
} else {
|
||||
filename := fileInfo.Name()
|
||||
if o.filenameEncryption != "off" {
|
||||
filename, err = cipher.DecryptFileName(filename)
|
||||
if err != nil {
|
||||
log.Fatalf("decrypt file name %v failed,err:%v", src, err)
|
||||
}
|
||||
log.Infof("decrypt file name %v to %v, ", fileInfo.Name(), filename)
|
||||
} else {
|
||||
filename = strings.TrimSuffix(filename, o.suffix)
|
||||
}
|
||||
|
||||
cryptSrcReader, err = cipher.DecryptData(fd)
|
||||
if err != nil {
|
||||
log.Fatalf("decrypt file %v failed,err:%v", src, err)
|
||||
|
||||
}
|
||||
outFile = path.Join(dst, filename)
|
||||
}
|
||||
//write new file
|
||||
wr, err := os.OpenFile(outFile, os.O_CREATE|os.O_WRONLY, 0755)
|
||||
if err != nil {
|
||||
log.Fatalf("create file %v failed,err:%v", outFile, err)
|
||||
|
||||
}
|
||||
defer wr.Close()
|
||||
|
||||
_, err = io.Copy(wr, cryptSrcReader)
|
||||
if err != nil {
|
||||
log.Fatalf("write file %v failed,err:%v", outFile, err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// check dir exist ,if not ,create
|
||||
func checkCreateDir(dir string) {
|
||||
_, err := os.Stat(dir)
|
||||
|
||||
if os.IsNotExist(err) {
|
||||
err := os.MkdirAll(dir, 0755)
|
||||
if err != nil {
|
||||
log.Fatalf("create dir %v failed,err:%v", dir, err)
|
||||
}
|
||||
return
|
||||
} else if err != nil {
|
||||
log.Fatalf("read dir %v err: %v", dir, err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func updateObfusParm(str string) string {
|
||||
obfuscatedPrefix := "___Obfuscated___"
|
||||
if !strings.HasPrefix(str, obfuscatedPrefix) {
|
||||
str, err := obscure.Obscure(str)
|
||||
if err != nil {
|
||||
log.Fatalf("update obfuscated parameter failed,err:%v", str)
|
||||
}
|
||||
} else {
|
||||
str, _ = strings.CutPrefix(str, obfuscatedPrefix)
|
||||
}
|
||||
return str
|
||||
}
|
||||
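The crypt command above can be driven from a shell roughly like this (a sketch using only the flags declared in cmd/crypt.go; password, salt, and paths are illustrative values, and with no --dst set for a directory the output goes to a sibling ${src}_crypt directory):

# encrypt a directory, keeping encrypted file and directory names
./openlist crypt --op=en -s ./src --pwd=123456 --salt=345678 --filename-encrypt=standard --dirname-encrypt=true

# decrypt it back into an explicit destination directory
./openlist crypt --op=de -s ./src_crypt -d ./plain --pwd=123456 --salt=345678 --filename-encrypt=standard --dirname-encrypt=true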
10
cmd/flags/config.go
Normal file
@@ -0,0 +1,10 @@
package flags

var (
	DataDir     string
	Debug       bool
	NoPrefix    bool
	Dev         bool
	ForceBinDir bool
	LogStd      bool
)
55
cmd/kill.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// KillCmd represents the kill command
|
||||
var KillCmd = &cobra.Command{
|
||||
Use: "kill",
|
||||
Short: "Force kill openlist server process by daemon/pid file",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
kill()
|
||||
},
|
||||
}
|
||||
|
||||
func kill() {
|
||||
initDaemon()
|
||||
if pid == -1 {
|
||||
log.Info("Seems not have been started. Try use `openlist start` to start server.")
|
||||
return
|
||||
}
|
||||
process, err := os.FindProcess(pid)
|
||||
if err != nil {
|
||||
log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
|
||||
return
|
||||
}
|
||||
err = process.Kill()
|
||||
if err != nil {
|
||||
log.Errorf("failed to kill process %d: %v", pid, err)
|
||||
} else {
|
||||
log.Info("killed process: ", pid)
|
||||
}
|
||||
err = os.Remove(pidFile)
|
||||
if err != nil {
|
||||
log.Errorf("failed to remove pid file")
|
||||
}
|
||||
pid = -1
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(KillCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// stopCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
169
cmd/lang.go
Normal file
@@ -0,0 +1,169 @@
|
||||
/*
|
||||
Package cmd
|
||||
Copyright © 2022 Noah Hsu<i@nn.ci>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
_ "github.com/OpenListTeam/OpenList/v4/drivers"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/bootstrap"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/bootstrap/data"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/conf"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/op"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
type KV[V any] map[string]V
|
||||
|
||||
type Drivers KV[KV[interface{}]]
|
||||
|
||||
var frontendPath string
|
||||
|
||||
func firstUpper(s string) string {
|
||||
if s == "" {
|
||||
return ""
|
||||
}
|
||||
return strings.ToUpper(s[:1]) + s[1:]
|
||||
}
|
||||
|
||||
func convert(s string) string {
|
||||
ss := strings.Split(s, "_")
|
||||
ans := strings.Join(ss, " ")
|
||||
return firstUpper(ans)
|
||||
}
|
||||
|
||||
func writeFile(name string, data interface{}) {
|
||||
f, err := os.Open(fmt.Sprintf("%s/src/lang/en/%s.json", frontendPath, name))
|
||||
if err != nil {
|
||||
log.Errorf("failed to open %s.json: %+v", name, err)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
content, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
log.Errorf("failed to read %s.json: %+v", name, err)
|
||||
return
|
||||
}
|
||||
oldData := make(map[string]interface{})
|
||||
newData := make(map[string]interface{})
|
||||
err = utils.Json.Unmarshal(content, &oldData)
|
||||
if err != nil {
|
||||
log.Errorf("failed to unmarshal %s.json: %+v", name, err)
|
||||
return
|
||||
}
|
||||
content, err = utils.Json.Marshal(data)
|
||||
if err != nil {
|
||||
log.Errorf("failed to marshal json: %+v", err)
|
||||
return
|
||||
}
|
||||
err = utils.Json.Unmarshal(content, &newData)
|
||||
if err != nil {
|
||||
log.Errorf("failed to unmarshal json: %+v", err)
|
||||
return
|
||||
}
|
||||
if reflect.DeepEqual(oldData, newData) {
|
||||
log.Infof("%s.json no changed, skip", name)
|
||||
} else {
|
||||
log.Infof("%s.json changed, update file", name)
|
||||
//log.Infof("old: %+v\nnew:%+v", oldData, data)
|
||||
utils.WriteJsonToFile(fmt.Sprintf("lang/%s.json", name), newData, true)
|
||||
}
|
||||
}
|
||||
|
||||
func generateDriversJson() {
|
||||
drivers := make(Drivers)
|
||||
drivers["drivers"] = make(KV[interface{}])
|
||||
drivers["config"] = make(KV[interface{}])
|
||||
driverInfoMap := op.GetDriverInfoMap()
|
||||
for k, v := range driverInfoMap {
|
||||
drivers["drivers"][k] = convert(k)
|
||||
items := make(KV[interface{}])
|
||||
config := map[string]string{}
|
||||
if v.Config.Alert != "" {
|
||||
alert := strings.SplitN(v.Config.Alert, "|", 2)
|
||||
if len(alert) > 1 {
|
||||
config["alert"] = alert[1]
|
||||
}
|
||||
}
|
||||
drivers["config"][k] = config
|
||||
for i := range v.Additional {
|
||||
item := v.Additional[i]
|
||||
items[item.Name] = convert(item.Name)
|
||||
if item.Help != "" {
|
||||
items[fmt.Sprintf("%s-tips", item.Name)] = item.Help
|
||||
}
|
||||
if item.Type == conf.TypeSelect && len(item.Options) > 0 {
|
||||
options := make(KV[string])
|
||||
_options := strings.Split(item.Options, ",")
|
||||
for _, o := range _options {
|
||||
options[o] = convert(o)
|
||||
}
|
||||
items[fmt.Sprintf("%ss", item.Name)] = options
|
||||
}
|
||||
}
|
||||
drivers[k] = items
|
||||
}
|
||||
writeFile("drivers", drivers)
|
||||
}
|
||||
|
||||
func generateSettingsJson() {
|
||||
settings := data.InitialSettings()
|
||||
settingsLang := make(KV[any])
|
||||
for _, setting := range settings {
|
||||
settingsLang[setting.Key] = convert(setting.Key)
|
||||
if setting.Help != "" {
|
||||
settingsLang[fmt.Sprintf("%s-tips", setting.Key)] = setting.Help
|
||||
}
|
||||
if setting.Type == conf.TypeSelect && len(setting.Options) > 0 {
|
||||
options := make(KV[string])
|
||||
_options := strings.Split(setting.Options, ",")
|
||||
for _, o := range _options {
|
||||
options[o] = convert(o)
|
||||
}
|
||||
settingsLang[fmt.Sprintf("%ss", setting.Key)] = options
|
||||
}
|
||||
}
|
||||
writeFile("settings", settingsLang)
|
||||
//utils.WriteJsonToFile("lang/settings.json", settingsLang)
|
||||
}
|
||||
|
||||
// LangCmd represents the lang command
|
||||
var LangCmd = &cobra.Command{
|
||||
Use: "lang",
|
||||
Short: "Generate language json file",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
frontendPath, _ = cmd.Flags().GetString("frontend-path")
|
||||
bootstrap.InitConfig()
|
||||
err := os.MkdirAll("lang", 0777)
|
||||
if err != nil {
|
||||
utils.Log.Fatalf("failed create folder: %s", err.Error())
|
||||
}
|
||||
generateDriversJson()
|
||||
generateSettingsJson()
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(LangCmd)
|
||||
|
||||
// Add frontend-path flag
|
||||
LangCmd.Flags().String("frontend-path", "../OpenList-Frontend", "Path to the frontend project directory")
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// langCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// langCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
32
cmd/restart.go
Normal file
@@ -0,0 +1,32 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// RestartCmd represents the restart command
|
||||
var RestartCmd = &cobra.Command{
|
||||
Use: "restart",
|
||||
Short: "Restart openlist server by daemon/pid file",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
stop()
|
||||
start()
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(RestartCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// restartCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// restartCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
36
cmd/root.go
Normal file
@@ -0,0 +1,36 @@
package cmd

import (
	"fmt"
	"os"

	"github.com/OpenListTeam/OpenList/v4/cmd/flags"
	_ "github.com/OpenListTeam/OpenList/v4/drivers"
	_ "github.com/OpenListTeam/OpenList/v4/internal/archive"
	_ "github.com/OpenListTeam/OpenList/v4/internal/offline_download"
	"github.com/spf13/cobra"
)

var RootCmd = &cobra.Command{
	Use:   "openlist",
	Short: "A file list program that supports multiple storage.",
	Long: `A file list program that supports multiple storage,
built with love by OpenListTeam.
Complete documentation is available at https://docs.openlist.team/`,
}

func Execute() {
	if err := RootCmd.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

func init() {
	RootCmd.PersistentFlags().StringVar(&flags.DataDir, "data", "data", "data folder")
	RootCmd.PersistentFlags().BoolVar(&flags.Debug, "debug", false, "start with debug mode")
	RootCmd.PersistentFlags().BoolVar(&flags.NoPrefix, "no-prefix", false, "disable env prefix")
	RootCmd.PersistentFlags().BoolVar(&flags.Dev, "dev", false, "start with dev mode")
	RootCmd.PersistentFlags().BoolVar(&flags.ForceBinDir, "force-bin-dir", false, "Force to use the directory where the binary file is located as data directory")
	RootCmd.PersistentFlags().BoolVar(&flags.LogStd, "log-std", false, "Force to log to std")
}
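The persistent flags registered here apply to every subcommand; a short sketch of typical invocations (only the flags and the `server` subcommand defined in this diff are assumed):

# start the server with a custom data directory and debug logging
./openlist server --data ./data --debug

# keep the data directory next to the binary
./openlist server --force-bin-dir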
246
cmd/server.go
Normal file
@@ -0,0 +1,246 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"strconv"
|
||||
"sync"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/cmd/flags"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/bootstrap"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/conf"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/fs"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
"github.com/OpenListTeam/OpenList/v4/server"
|
||||
"github.com/OpenListTeam/sftpd-openlist"
|
||||
ftpserver "github.com/fclairamb/ftpserverlib"
|
||||
"github.com/gin-gonic/gin"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/net/http2"
|
||||
"golang.org/x/net/http2/h2c"
|
||||
)
|
||||
|
||||
// ServerCmd represents the server command
|
||||
var ServerCmd = &cobra.Command{
|
||||
Use: "server",
|
||||
Short: "Start the server at the specified address",
|
||||
Long: `Start the server at the specified address
|
||||
the address is defined in config file`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
if conf.Conf.DelayedStart != 0 {
|
||||
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
|
||||
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
|
||||
}
|
||||
bootstrap.InitOfflineDownloadTools()
|
||||
bootstrap.LoadStorages()
|
||||
bootstrap.InitTaskManager()
|
||||
if !flags.Debug && !flags.Dev {
|
||||
gin.SetMode(gin.ReleaseMode)
|
||||
}
|
||||
r := gin.New()
|
||||
r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
|
||||
server.Init(r)
|
||||
var httpHandler http.Handler = r
|
||||
if conf.Conf.Scheme.EnableH2c {
|
||||
httpHandler = h2c.NewHandler(r, &http2.Server{})
|
||||
}
|
||||
var httpSrv, httpsSrv, unixSrv *http.Server
|
||||
if conf.Conf.Scheme.HttpPort != -1 {
|
||||
httpBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpPort)
|
||||
utils.Log.Infof("start HTTP server @ %s", httpBase)
|
||||
httpSrv = &http.Server{Addr: httpBase, Handler: httpHandler}
|
||||
go func() {
|
||||
err := httpSrv.ListenAndServe()
|
||||
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||
utils.Log.Fatalf("failed to start http: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.Scheme.HttpsPort != -1 {
|
||||
httpsBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpsPort)
|
||||
utils.Log.Infof("start HTTPS server @ %s", httpsBase)
|
||||
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
|
||||
go func() {
|
||||
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
||||
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||
utils.Log.Fatalf("failed to start https: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.Scheme.UnixFile != "" {
|
||||
utils.Log.Infof("start unix server @ %s", conf.Conf.Scheme.UnixFile)
|
||||
unixSrv = &http.Server{Handler: httpHandler}
|
||||
go func() {
|
||||
listener, err := net.Listen("unix", conf.Conf.Scheme.UnixFile)
|
||||
if err != nil {
|
||||
utils.Log.Fatalf("failed to listen unix: %+v", err)
|
||||
}
|
||||
// set socket file permission
|
||||
mode, err := strconv.ParseUint(conf.Conf.Scheme.UnixFilePerm, 8, 32)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to parse socket file permission: %+v", err)
|
||||
} else {
|
||||
err = os.Chmod(conf.Conf.Scheme.UnixFile, os.FileMode(mode))
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to chmod socket file: %+v", err)
|
||||
}
|
||||
}
|
||||
err = unixSrv.Serve(listener)
|
||||
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||
utils.Log.Fatalf("failed to start unix: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.S3.Port != -1 && conf.Conf.S3.Enable {
|
||||
s3r := gin.New()
|
||||
s3r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
|
||||
server.InitS3(s3r)
|
||||
s3Base := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.S3.Port)
|
||||
utils.Log.Infof("start S3 server @ %s", s3Base)
|
||||
go func() {
|
||||
var err error
|
||||
if conf.Conf.S3.SSL {
|
||||
httpsSrv = &http.Server{Addr: s3Base, Handler: s3r}
|
||||
err = httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
||||
}
|
||||
if !conf.Conf.S3.SSL {
|
||||
httpSrv = &http.Server{Addr: s3Base, Handler: s3r}
|
||||
err = httpSrv.ListenAndServe()
|
||||
}
|
||||
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||
utils.Log.Fatalf("failed to start s3 server: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
var ftpDriver *server.FtpMainDriver
|
||||
var ftpServer *ftpserver.FtpServer
|
||||
if conf.Conf.FTP.Listen != "" && conf.Conf.FTP.Enable {
|
||||
var err error
|
||||
ftpDriver, err = server.NewMainDriver()
|
||||
if err != nil {
|
||||
utils.Log.Fatalf("failed to start ftp driver: %s", err.Error())
|
||||
} else {
|
||||
utils.Log.Infof("start ftp server on %s", conf.Conf.FTP.Listen)
|
||||
go func() {
|
||||
ftpServer = ftpserver.NewFtpServer(ftpDriver)
|
||||
err = ftpServer.ListenAndServe()
|
||||
if err != nil {
|
||||
utils.Log.Fatalf("problem ftp server listening: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
var sftpDriver *server.SftpDriver
|
||||
var sftpServer *sftpd.SftpServer
|
||||
if conf.Conf.SFTP.Listen != "" && conf.Conf.SFTP.Enable {
|
||||
var err error
|
||||
sftpDriver, err = server.NewSftpDriver()
|
||||
if err != nil {
|
||||
utils.Log.Fatalf("failed to start sftp driver: %s", err.Error())
|
||||
} else {
|
||||
utils.Log.Infof("start sftp server on %s", conf.Conf.SFTP.Listen)
|
||||
go func() {
|
||||
sftpServer = sftpd.NewSftpServer(sftpDriver)
|
||||
err = sftpServer.RunServer()
|
||||
if err != nil {
|
||||
utils.Log.Fatalf("problem sftp server listening: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
// Wait for interrupt signal to gracefully shutdown the server with
|
||||
// a timeout of 1 second.
|
||||
quit := make(chan os.Signal, 1)
|
||||
// kill (no param) default send syscanll.SIGTERM
|
||||
// kill -2 is syscall.SIGINT
|
||||
// kill -9 is syscall. SIGKILL but can"t be catch, so don't need add it
|
||||
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-quit
|
||||
utils.Log.Println("Shutdown server...")
|
||||
fs.ArchiveContentUploadTaskManager.RemoveAll()
|
||||
Release()
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
|
||||
defer cancel()
|
||||
var wg sync.WaitGroup
|
||||
if conf.Conf.Scheme.HttpPort != -1 {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if err := httpSrv.Shutdown(ctx); err != nil {
|
||||
utils.Log.Fatal("HTTP server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.Scheme.HttpsPort != -1 {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if err := httpsSrv.Shutdown(ctx); err != nil {
|
||||
utils.Log.Fatal("HTTPS server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.Scheme.UnixFile != "" {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if err := unixSrv.Shutdown(ctx); err != nil {
|
||||
utils.Log.Fatal("Unix server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.FTP.Listen != "" && conf.Conf.FTP.Enable && ftpServer != nil && ftpDriver != nil {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
ftpDriver.Stop()
|
||||
if err := ftpServer.Stop(); err != nil {
|
||||
utils.Log.Fatal("FTP server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.SFTP.Listen != "" && conf.Conf.SFTP.Enable && sftpServer != nil && sftpDriver != nil {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if err := sftpServer.Close(); err != nil {
|
||||
utils.Log.Fatal("SFTP server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
utils.Log.Println("Server exit")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(ServerCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// serverCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// serverCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
|
||||
// OutOpenListInit 暴露用于外部启动server的函数
|
||||
func OutOpenListInit() {
|
||||
var (
|
||||
cmd *cobra.Command
|
||||
args []string
|
||||
)
|
||||
ServerCmd.Run(cmd, args)
|
||||
}
|
||||
71
cmd/start.go
Normal file
@@ -0,0 +1,71 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// StartCmd represents the start command
|
||||
var StartCmd = &cobra.Command{
|
||||
Use: "start",
|
||||
Short: "Silent start openlist server with `--force-bin-dir`",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
start()
|
||||
},
|
||||
}
|
||||
|
||||
func start() {
|
||||
initDaemon()
|
||||
if pid != -1 {
|
||||
_, err := os.FindProcess(pid)
|
||||
if err == nil {
|
||||
log.Info("openlist already started, pid ", pid)
|
||||
return
|
||||
}
|
||||
}
|
||||
args := os.Args
|
||||
args[1] = "server"
|
||||
args = append(args, "--force-bin-dir")
|
||||
cmd := &exec.Cmd{
|
||||
Path: args[0],
|
||||
Args: args,
|
||||
Env: os.Environ(),
|
||||
}
|
||||
stdout, err := os.OpenFile(filepath.Join(filepath.Dir(pidFile), "start.log"), os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666)
|
||||
if err != nil {
|
||||
log.Fatal(os.Getpid(), ": failed to open start log file:", err)
|
||||
}
|
||||
cmd.Stderr = stdout
|
||||
cmd.Stdout = stdout
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
log.Fatal("failed to start children process: ", err)
|
||||
}
|
||||
log.Infof("success start pid: %d", cmd.Process.Pid)
|
||||
err = os.WriteFile(pidFile, []byte(strconv.Itoa(cmd.Process.Pid)), 0666)
|
||||
if err != nil {
|
||||
log.Warn("failed to record pid, you may not be able to stop the program with `./openlist stop`")
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(StartCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// startCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// startCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
58
cmd/stop_default.go
Normal file
@@ -0,0 +1,58 @@
|
||||
//go:build !windows
|
||||
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"syscall"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// StopCmd represents the stop command
|
||||
var StopCmd = &cobra.Command{
|
||||
Use: "stop",
|
||||
Short: "Stop openlist server by daemon/pid file",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
stop()
|
||||
},
|
||||
}
|
||||
|
||||
func stop() {
|
||||
initDaemon()
|
||||
if pid == -1 {
|
||||
log.Info("Seems not have been started. Try use `openlist start` to start server.")
|
||||
return
|
||||
}
|
||||
process, err := os.FindProcess(pid)
|
||||
if err != nil {
|
||||
log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
|
||||
return
|
||||
}
|
||||
err = process.Signal(syscall.SIGTERM)
|
||||
if err != nil {
|
||||
log.Errorf("failed to terminate process %d: %v", pid, err)
|
||||
} else {
|
||||
log.Info("terminated process: ", pid)
|
||||
}
|
||||
err = os.Remove(pidFile)
|
||||
if err != nil {
|
||||
log.Errorf("failed to remove pid file")
|
||||
}
|
||||
pid = -1
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(StopCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// stopCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
34
cmd/stop_windows.go
Normal file
@@ -0,0 +1,34 @@
|
||||
//go:build windows
|
||||
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// StopCmd represents the stop command
|
||||
var StopCmd = &cobra.Command{
|
||||
Use: "stop",
|
||||
Short: "Same as the kill command",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
stop()
|
||||
},
|
||||
}
|
||||
|
||||
func stop() {
|
||||
kill()
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(StopCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// stopCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
163
cmd/storage.go
Normal file
@@ -0,0 +1,163 @@
|
||||
/*
|
||||
Copyright © 2023 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/db"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
"github.com/charmbracelet/bubbles/table"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// storageCmd represents the storage command
|
||||
var storageCmd = &cobra.Command{
|
||||
Use: "storage",
|
||||
Short: "Manage storage",
|
||||
}
|
||||
|
||||
var disableStorageCmd = &cobra.Command{
|
||||
Use: "disable",
|
||||
Short: "Disable a storage",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if len(args) < 1 {
|
||||
utils.Log.Errorf("mount path is required")
|
||||
return
|
||||
}
|
||||
mountPath := args[0]
|
||||
Init()
|
||||
defer Release()
|
||||
storage, err := db.GetStorageByMountPath(mountPath)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to query storage: %+v", err)
|
||||
} else {
|
||||
storage.Disabled = true
|
||||
err = db.UpdateStorage(storage)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to update storage: %+v", err)
|
||||
} else {
|
||||
utils.Log.Infof("Storage with mount path [%s] have been disabled", mountPath)
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
var baseStyle = lipgloss.NewStyle().
|
||||
BorderStyle(lipgloss.NormalBorder()).
|
||||
BorderForeground(lipgloss.Color("240"))
|
||||
|
||||
type model struct {
|
||||
table table.Model
|
||||
}
|
||||
|
||||
func (m model) Init() tea.Cmd { return nil }
|
||||
|
||||
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
var cmd tea.Cmd
|
||||
switch msg := msg.(type) {
|
||||
case tea.KeyMsg:
|
||||
switch msg.String() {
|
||||
case "esc":
|
||||
if m.table.Focused() {
|
||||
m.table.Blur()
|
||||
} else {
|
||||
m.table.Focus()
|
||||
}
|
||||
case "q", "ctrl+c":
|
||||
return m, tea.Quit
|
||||
//case "enter":
|
||||
// return m, tea.Batch(
|
||||
// tea.Printf("Let's go to %s!", m.table.SelectedRow()[1]),
|
||||
// )
|
||||
}
|
||||
}
|
||||
m.table, cmd = m.table.Update(msg)
|
||||
return m, cmd
|
||||
}
|
||||
|
||||
func (m model) View() string {
|
||||
return baseStyle.Render(m.table.View()) + "\n"
|
||||
}
|
||||
|
||||
var storageTableHeight int
|
||||
var listStorageCmd = &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List all storages",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
defer Release()
|
||||
storages, _, err := db.GetStorages(1, -1)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to query storages: %+v", err)
|
||||
} else {
|
||||
utils.Log.Infof("Found %d storages", len(storages))
|
||||
columns := []table.Column{
|
||||
{Title: "ID", Width: 4},
|
||||
{Title: "Driver", Width: 16},
|
||||
{Title: "Mount Path", Width: 30},
|
||||
{Title: "Enabled", Width: 7},
|
||||
}
|
||||
|
||||
var rows []table.Row
|
||||
for i := range storages {
|
||||
storage := storages[i]
|
||||
enabled := "true"
|
||||
if storage.Disabled {
|
||||
enabled = "false"
|
||||
}
|
||||
rows = append(rows, table.Row{
|
||||
strconv.Itoa(int(storage.ID)),
|
||||
storage.Driver,
|
||||
storage.MountPath,
|
||||
enabled,
|
||||
})
|
||||
}
|
||||
t := table.New(
|
||||
table.WithColumns(columns),
|
||||
table.WithRows(rows),
|
||||
table.WithFocused(true),
|
||||
table.WithHeight(storageTableHeight),
|
||||
)
|
||||
|
||||
s := table.DefaultStyles()
|
||||
s.Header = s.Header.
|
||||
BorderStyle(lipgloss.NormalBorder()).
|
||||
BorderForeground(lipgloss.Color("240")).
|
||||
BorderBottom(true).
|
||||
Bold(false)
|
||||
s.Selected = s.Selected.
|
||||
Foreground(lipgloss.Color("229")).
|
||||
Background(lipgloss.Color("57")).
|
||||
Bold(false)
|
||||
t.SetStyles(s)
|
||||
|
||||
m := model{t}
|
||||
if _, err := tea.NewProgram(m).Run(); err != nil {
|
||||
utils.Log.Errorf("failed to run program: %+v", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
||||
RootCmd.AddCommand(storageCmd)
|
||||
storageCmd.AddCommand(disableStorageCmd)
|
||||
storageCmd.AddCommand(listStorageCmd)
|
||||
storageCmd.PersistentFlags().IntVarP(&storageTableHeight, "height", "H", 10, "Table height")
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// storageCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// storageCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
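A quick sketch of how the storage subcommands above are used from a shell (only the `list`, `disable`, and `--height` flag defined in cmd/storage.go are assumed; the mount path is a hypothetical example):

# list all storages in an interactive table (press q or ctrl+c to quit)
./openlist storage list --height 15

# disable the storage mounted at /115 (hypothetical mount path)
./openlist storage disable /115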
52
cmd/user.go
Normal file
@@ -0,0 +1,52 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/conf"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/op"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/setting"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
func DelAdminCacheOnline() {
|
||||
admin, err := op.GetAdmin()
|
||||
if err != nil {
|
||||
utils.Log.Errorf("[del_admin_cache] get admin error: %+v", err)
|
||||
return
|
||||
}
|
||||
DelUserCacheOnline(admin.Username)
|
||||
}
|
||||
|
||||
func DelUserCacheOnline(username string) {
|
||||
client := resty.New().SetTimeout(1 * time.Second).SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||
token := setting.GetStr(conf.Token)
|
||||
port := conf.Conf.Scheme.HttpPort
|
||||
u := fmt.Sprintf("http://localhost:%d/api/admin/user/del_cache", port)
|
||||
if port == -1 {
|
||||
if conf.Conf.Scheme.HttpsPort == -1 {
|
||||
utils.Log.Warnf("[del_user_cache] no open port")
|
||||
return
|
||||
}
|
||||
u = fmt.Sprintf("https://localhost:%d/api/admin/user/del_cache", conf.Conf.Scheme.HttpsPort)
|
||||
}
|
||||
res, err := client.R().SetHeader("Authorization", token).SetQueryParam("username", username).Post(u)
|
||||
if err != nil {
|
||||
utils.Log.Warnf("[del_user_cache_online] failed: %+v", err)
|
||||
return
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
utils.Log.Warnf("[del_user_cache_online] failed: %+v", res.String())
|
||||
return
|
||||
}
|
||||
code := utils.Json.Get(res.Body(), "code").ToInt()
|
||||
msg := utils.Json.Get(res.Body(), "message").ToString()
|
||||
if code != 200 {
|
||||
utils.Log.Errorf("[del_user_cache_online] error: %s", msg)
|
||||
return
|
||||
}
|
||||
utils.Log.Debugf("[del_user_cache_online] del user [%s] cache success", username)
|
||||
}
|
||||
45
cmd/version.go
Normal file
@@ -0,0 +1,45 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/conf"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// VersionCmd represents the version command
|
||||
var VersionCmd = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show current version of OpenList",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
goVersion := fmt.Sprintf("%s %s/%s", runtime.Version(), runtime.GOOS, runtime.GOARCH)
|
||||
|
||||
fmt.Printf(`Built At: %s
|
||||
Go Version: %s
|
||||
Author: %s
|
||||
Commit ID: %s
|
||||
Version: %s
|
||||
WebVersion: %s
|
||||
`, conf.BuiltAt, goVersion, conf.GitAuthor, conf.GitCommit, conf.Version, conf.WebVersion)
|
||||
os.Exit(0)
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(VersionCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// versionCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// versionCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
package conf
|
||||
|
||||
type Database struct {
|
||||
Type string `json:"type"`
|
||||
User string `json:"user"`
|
||||
Password string `json:"password"`
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
Name string `json:"name"`
|
||||
TablePrefix string `json:"table_prefix"`
|
||||
DBFile string `json:"db_file"`
|
||||
}
|
||||
type Config struct {
|
||||
Address string `json:"address"`
|
||||
Port int `json:"port"`
|
||||
Database Database `json:"database"`
|
||||
Https bool `json:"https"`
|
||||
CertFile string `json:"cert_file"`
|
||||
KeyFile string `json:"key_file"`
|
||||
}
|
||||
|
||||
func DefaultConfig() *Config {
|
||||
return &Config{
|
||||
Address: "0.0.0.0",
|
||||
Port: 5244,
|
||||
Database: Database{
|
||||
Type: "sqlite3",
|
||||
Port: 0,
|
||||
TablePrefix: "x_",
|
||||
DBFile: "data/data.db",
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
package conf

const (
	UNKNOWN = iota
	FOLDER
	OFFICE
	VIDEO
	AUDIO
	TEXT
	IMAGE
)
49
conf/var.go
@@ -1,49 +0,0 @@
|
||||
package conf
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/eko/gocache/v2/cache"
|
||||
"github.com/robfig/cron/v3"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
var (
|
||||
BuiltAt string
|
||||
GoVersion string
|
||||
GitAuthor string
|
||||
GitCommit string
|
||||
GitTag string = "dev"
|
||||
)
|
||||
|
||||
var (
|
||||
ConfigFile string // config file
|
||||
Conf *Config
|
||||
Debug bool
|
||||
Version bool
|
||||
Password bool
|
||||
|
||||
DB *gorm.DB
|
||||
Cache *cache.Cache
|
||||
Ctx = context.TODO()
|
||||
Cron *cron.Cron
|
||||
)
|
||||
|
||||
var (
|
||||
TextTypes = []string{"txt", "go", "md"}
|
||||
OfficeTypes = []string{"doc", "docx", "xls", "xlsx", "ppt", "pptx", "pdf"}
|
||||
VideoTypes = []string{"mp4", "mkv", "avi", "mov", "rmvb", "webm"}
|
||||
AudioTypes = []string{"mp3", "flac", "ogg", "m4a"}
|
||||
ImageTypes = []string{"jpg", "tiff", "jpeg", "png", "gif", "bmp", "svg"}
|
||||
)
|
||||
|
||||
// settings
|
||||
var (
|
||||
RawIndexHtml string
|
||||
IndexHtml string
|
||||
CheckParent bool
|
||||
CheckDown bool
|
||||
|
||||
Token string
|
||||
DavUsername string
|
||||
DavPassword string
|
||||
)
|
||||
15
docker-compose.yml
Normal file
@@ -0,0 +1,15 @@
services:
  openlist:
    restart: always
    volumes:
      - '/etc/openlist:/opt/openlist/data'
    ports:
      - '5244:5244'
      - '5245:5245'
    environment:
      - PUID=0
      - PGID=0
      - UMASK=022
      - TZ=UTC
    container_name: openlist
    image: 'openlistteam/openlist:latest'
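With this compose file saved as docker-compose.yml, the service is started with standard Docker Compose commands (a minimal sketch; only the container name from the file above is assumed):

# bring the service up in the background
docker compose up -d

# follow the container logs, e.g. to read the first-startup admin password
docker logs -f openlist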
43
drivers/115/appver.go
Normal file
@@ -0,0 +1,43 @@
package _115

import (
	"github.com/OpenListTeam/OpenList/v4/drivers/base"
	driver115 "github.com/SheltonZhu/115driver/pkg/driver"
	log "github.com/sirupsen/logrus"
)

var (
	md5Salt = "Qclm8MGWUv59TnrR0XPg"
	appVer  = "27.0.5.7"
)

func (d *Pan115) getAppVersion() ([]driver115.AppVersion, error) {
	result := driver115.VersionResp{}
	resp, err := base.RestyClient.R().Get(driver115.ApiGetVersion)

	err = driver115.CheckErr(err, &result, resp)
	if err != nil {
		return nil, err
	}

	return result.Data.GetAppVersions(), nil
}

func (d *Pan115) getAppVer() string {
	// todo add some cache?
	vers, err := d.getAppVersion()
	if err != nil {
		log.Warnf("[115] get app version failed: %v", err)
		return appVer
	}
	for _, ver := range vers {
		if ver.AppName == "win" {
			return ver.Version
		}
	}
	return appVer
}

func (d *Pan115) initAppVer() {
	appVer = d.getAppVer()
}
248
drivers/115/driver.go
Normal file
@@ -0,0 +1,248 @@
|
||||
package _115
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/driver"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
streamPkg "github.com/OpenListTeam/OpenList/v4/internal/stream"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/http_range"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||
"github.com/pkg/errors"
|
||||
"golang.org/x/time/rate"
|
||||
)
|
||||
|
||||
type Pan115 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
client *driver115.Pan115Client
|
||||
limiter *rate.Limiter
|
||||
appVerOnce sync.Once
|
||||
}
|
||||
|
||||
func (d *Pan115) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Pan115) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Pan115) Init(ctx context.Context) error {
|
||||
d.appVerOnce.Do(d.initAppVer)
|
||||
if d.LimitRate > 0 {
|
||||
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
|
||||
}
|
||||
return d.login()
|
||||
}
|
||||
|
||||
func (d *Pan115) WaitLimit(ctx context.Context) error {
|
||||
if d.limiter != nil {
|
||||
return d.limiter.Wait(ctx)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan115) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	files, err := d.getFiles(dir.GetID())
	if err != nil && !errors.Is(err, driver115.ErrNotExist) {
		return nil, err
	}
	return utils.SliceConvert(files, func(src FileObj) (model.Obj, error) {
		return &src, nil
	})
}

func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	userAgent := args.Header.Get("User-Agent")
	downloadInfo, err := d.DownloadWithUA(file.(*FileObj).PickCode, userAgent)
	if err != nil {
		return nil, err
	}
	link := &model.Link{
		URL:    downloadInfo.Url.Url,
		Header: downloadInfo.Header,
	}
	return link, nil
}

func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}

	result := driver115.MkdirResp{}
	form := map[string]string{
		"pid":   parentDir.GetID(),
		"cname": dirName,
	}
	req := d.client.NewRequest().
		SetFormData(form).
		SetResult(&result).
		ForceContentType("application/json;charset=UTF-8")

	resp, err := req.Post(driver115.ApiDirAdd)

	err = driver115.CheckErr(err, &result, resp)
	if err != nil {
		return nil, err
	}
	f, err := d.getNewFile(result.FileID)
	if err != nil {
		return nil, nil
	}
	return f, nil
}

func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	if err := d.client.Move(dstDir.GetID(), srcObj.GetID()); err != nil {
		return nil, err
	}
	f, err := d.getNewFile(srcObj.GetID())
	if err != nil {
		return nil, nil
	}
	return f, nil
}

func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	if err := d.client.Rename(srcObj.GetID(), newName); err != nil {
		return nil, err
	}
	f, err := d.getNewFile(srcObj.GetID())
	if err != nil {
		return nil, nil
	}
	return f, nil
}

func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
	if err := d.WaitLimit(ctx); err != nil {
		return err
	}
	return d.client.Copy(dstDir.GetID(), srcObj.GetID())
}

func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
	if err := d.WaitLimit(ctx); err != nil {
		return err
	}
	return d.client.Delete(obj.GetID())
}

func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}

	var (
		fastInfo *driver115.UploadInitResp
		dirID    = dstDir.GetID()
	)

	if ok, err := d.client.UploadAvailable(); err != nil || !ok {
		return nil, err
	}
	if stream.GetSize() > d.client.UploadMetaInfo.SizeLimit {
		return nil, driver115.ErrUploadTooLarge
	}
	//if digest, err = d.client.GetDigestResult(stream); err != nil {
	//	return err
	//}

	const PreHashSize int64 = 128 * utils.KB
	hashSize := PreHashSize
	if stream.GetSize() < PreHashSize {
		hashSize = stream.GetSize()
	}
	reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: hashSize})
	if err != nil {
		return nil, err
	}
	preHash, err := utils.HashReader(utils.SHA1, reader)
	if err != nil {
		return nil, err
	}
	preHash = strings.ToUpper(preHash)
	fullHash := stream.GetHash().GetHash(utils.SHA1)
	if len(fullHash) != utils.SHA1.Width {
		_, fullHash, err = streamPkg.CacheFullInTempFileAndHash(stream, utils.SHA1)
		if err != nil {
			return nil, err
		}
	}
	fullHash = strings.ToUpper(fullHash)

	// rapid-upload
	// note that 115 adds a timeout for rapid-upload,
	// and a "sig invalid" error is thrown even when the hash is correct after the timeout.
	if fastInfo, err = d.rapidUpload(stream.GetSize(), stream.GetName(), dirID, preHash, fullHash, stream); err != nil {
		return nil, err
	}
	if matched, err := fastInfo.Ok(); err != nil {
		return nil, err
	} else if matched {
		f, err := d.getNewFileByPickCode(fastInfo.PickCode)
		if err != nil {
			return nil, nil
		}
		return f, nil
	}

	var uploadResult *UploadResult
	// rapid upload missed, fall back to a real upload
	if stream.GetSize() <= 10*utils.MB { // files up to 10MB are uploaded in normal (single-request) mode
		if uploadResult, err = d.UploadByOSS(ctx, &fastInfo.UploadOSSParams, stream, dirID, up); err != nil {
			return nil, err
		}
	} else {
		// multipart upload
		if uploadResult, err = d.UploadByMultipart(ctx, &fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID, up); err != nil {
			return nil, err
		}
	}

	file, err := d.getNewFile(uploadResult.Data.FileID)
	if err != nil {
		return nil, nil
	}
	return file, nil
}

func (d *Pan115) OfflineList(ctx context.Context) ([]*driver115.OfflineTask, error) {
	resp, err := d.client.ListOfflineTask(0)
	if err != nil {
		return nil, err
	}
	return resp.Tasks, nil
}

func (d *Pan115) OfflineDownload(ctx context.Context, uris []string, dstDir model.Obj) ([]string, error) {
	return d.client.AddOfflineTaskURIs(uris, dstDir.GetID(), driver115.WithAppVer(appVer))
}

func (d *Pan115) DeleteOfflineTasks(ctx context.Context, hashes []string, deleteFiles bool) error {
	return d.client.DeleteOfflineTasks(hashes, deleteFiles)
}

var _ driver.Driver = (*Pan115)(nil)
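Every method above starts with WaitLimit, which gates the request through a token-bucket limiter built from the driver's limit_rate option. A minimal, self-contained sketch of that pattern (not OpenList code; the 2 req/s figure simply mirrors the driver's default limit_rate):

package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	// Roughly 2 requests per second with a burst of 1, like the default limit_rate.
	limiter := rate.NewLimiter(rate.Limit(2), 1)
	ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
	defer cancel()

	for i := 0; i < 5; i++ {
		// Wait blocks until a token is available or the context is cancelled.
		if err := limiter.Wait(ctx); err != nil {
			fmt.Println("aborted:", err)
			return
		}
		fmt.Println("request", i, "at", time.Now().Format("15:04:05.000"))
	}
}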
drivers/115/meta.go (new file, 29 lines)
@@ -0,0 +1,29 @@
package _115

import (
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/op"
)

type Addition struct {
	Cookie       string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
	QRCodeToken  string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
	QRCodeSource string  `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
	PageSize     int64   `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
	LimitRate    float64 `json:"limit_rate" type:"float" default:"2" help:"limit all api request rate ([limit]r/1s)"`
	driver.RootID
}

var config = driver.Config{
	Name:        "115 Cloud",
	DefaultRoot: "0",
	// OnlyProxy:         true,
	// OnlyLocal:         true,
	// NoOverwriteUpload: true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Pan115{}
	})
}
drivers/115/types.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package _115

import (
	"time"

	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	"github.com/SheltonZhu/115driver/pkg/driver"
)

var _ model.Obj = (*FileObj)(nil)

type FileObj struct {
	driver.File
}

func (f *FileObj) CreateTime() time.Time {
	return f.File.CreateTime
}

func (f *FileObj) GetHash() utils.HashInfo {
	return utils.NewHashInfo(utils.SHA1, f.Sha1)
}

type UploadResult struct {
	driver.BasicResp
	Data struct {
		PickCode string `json:"pick_code"`
		FileSize int    `json:"file_size"`
		FileID   string `json:"file_id"`
		ThumbURL string `json:"thumb_url"`
		Sha1     string `json:"sha1"`
		Aid      int    `json:"aid"`
		FileName string `json:"file_name"`
		Cid      string `json:"cid"`
		IsVideo  int    `json:"is_video"`
	} `json:"data"`
}
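The UploadResult struct above mirrors the JSON body returned by the OSS upload callback. A minimal decoding sketch, with a locally declared stand-in struct and a made-up illustrative payload (not real API output):

package main

import (
	"encoding/json"
	"fmt"
)

// uploadData is a trimmed-down stand-in for UploadResult.Data, declared here only for this sketch.
type uploadData struct {
	PickCode string `json:"pick_code"`
	FileID   string `json:"file_id"`
	FileName string `json:"file_name"`
	FileSize int    `json:"file_size"`
}

func main() {
	// Hypothetical callback body; the values are invented for illustration.
	body := []byte(`{"data":{"pick_code":"abc123","file_id":"100","file_name":"demo.bin","file_size":42}}`)
	var result struct {
		Data uploadData `json:"data"`
	}
	if err := json.Unmarshal(body, &result); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Printf("uploaded %s (id %s, %d bytes)\n", result.Data.FileName, result.Data.FileID, result.Data.FileSize)
}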
drivers/115/util.go (new file, 549 lines)
@@ -0,0 +1,549 @@
package _115

import (
	"bytes"
	"context"
	"crypto/md5"
	"crypto/tls"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"github.com/OpenListTeam/OpenList/v4/internal/conf"
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/pkg/http_range"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	"github.com/aliyun/aliyun-oss-go-sdk/oss"

	cipher "github.com/SheltonZhu/115driver/pkg/crypto/ec115"
	crypto "github.com/SheltonZhu/115driver/pkg/crypto/m115"
	driver115 "github.com/SheltonZhu/115driver/pkg/driver"
	"github.com/pkg/errors"
)

// var UserAgent = driver115.UA115Browser
func (d *Pan115) login() error {
	var err error
	opts := []driver115.Option{
		driver115.UA(d.getUA()),
		func(c *driver115.Pan115Client) {
			c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
		},
	}
	d.client = driver115.New(opts...)
	cr := &driver115.Credential{}
	if d.QRCodeToken != "" {
		s := &driver115.QRCodeSession{
			UID: d.QRCodeToken,
		}
		if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
			return errors.Wrap(err, "failed to login by qrcode")
		}
		d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", cr.UID, cr.CID, cr.SEID, cr.KID)
		d.QRCodeToken = ""
	} else if d.Cookie != "" {
		if err = cr.FromCookie(d.Cookie); err != nil {
			return errors.Wrap(err, "failed to login by cookies")
		}
		d.client.ImportCredential(cr)
	} else {
		return errors.New("missing cookie or qrcode account")
	}
	return d.client.LoginCheck()
}

func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
	res := make([]FileObj, 0)
	if d.PageSize <= 0 {
		d.PageSize = driver115.FileListLimit
	}
	files, err := d.client.ListWithLimit(fileId, d.PageSize, driver115.WithMultiUrls())
	if err != nil {
		return nil, err
	}
	for _, file := range *files {
		res = append(res, FileObj{file})
	}
	return res, nil
}

func (d *Pan115) getNewFile(fileId string) (*FileObj, error) {
	file, err := d.client.GetFile(fileId)
	if err != nil {
		return nil, err
	}
	return &FileObj{*file}, nil
}

func (d *Pan115) getNewFileByPickCode(pickCode string) (*FileObj, error) {
	result := driver115.GetFileInfoResponse{}
	req := d.client.NewRequest().
		SetQueryParam("pick_code", pickCode).
		ForceContentType("application/json;charset=UTF-8").
		SetResult(&result)
	resp, err := req.Get(driver115.ApiFileInfo)
	if err := driver115.CheckErr(err, &result, resp); err != nil {
		return nil, err
	}
	if len(result.Files) == 0 {
		return nil, errors.New("not get file info")
	}
	fileInfo := result.Files[0]

	f := &FileObj{}
	f.From(fileInfo)
	return f, nil
}

func (d *Pan115) getUA() string {
	return fmt.Sprintf("Mozilla/5.0 115Browser/%s", appVer)
}

func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
	key := crypto.GenerateKey()
	result := driver115.DownloadResp{}
	params, err := utils.Json.Marshal(map[string]string{"pick_code": pickCode})
	if err != nil {
		return nil, err
	}

	data := crypto.Encode(params, key)

	bodyReader := strings.NewReader(url.Values{"data": []string{data}}.Encode())
	reqUrl := fmt.Sprintf("%s?t=%s", driver115.AndroidApiDownloadGetUrl, driver115.Now().String())
	req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Cookie", d.Cookie)
	req.Header.Set("User-Agent", ua)

	resp, err := d.client.Client.GetClient().Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if err := utils.Json.Unmarshal(body, &result); err != nil {
		return nil, err
	}

	if err = result.Err(string(body)); err != nil {
		return nil, err
	}

	b, err := crypto.Decode(string(result.EncodedData), key)
	if err != nil {
		return nil, err
	}

	downloadInfo := struct {
		Url string `json:"url"`
	}{}
	if err := utils.Json.Unmarshal(b, &downloadInfo); err != nil {
		return nil, err
	}

	info := &driver115.DownloadInfo{}
	info.PickCode = pickCode
	info.Header = resp.Request.Header
	info.Url.Url = downloadInfo.Url
	return info, nil
}

func (c *Pan115) GenerateToken(fileID, preID, timeStamp, fileSize, signKey, signVal string) string {
	userID := strconv.FormatInt(c.client.UserID, 10)
	userIDMd5 := md5.Sum([]byte(userID))
	tokenMd5 := md5.Sum([]byte(md5Salt + fileID + fileSize + signKey + signVal + userID + timeStamp + hex.EncodeToString(userIDMd5[:]) + appVer))
	return hex.EncodeToString(tokenMd5[:])
}

func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
	var (
		ecdhCipher   *cipher.EcdhCipher
		encrypted    []byte
		decrypted    []byte
		encodedToken string
		err          error
		target       = "U_1_" + dirID
		bodyBytes    []byte
		result       = driver115.UploadInitResp{}
		fileSizeStr  = strconv.FormatInt(fileSize, 10)
	)
	if ecdhCipher, err = cipher.NewEcdhCipher(); err != nil {
		return nil, err
	}

	userID := strconv.FormatInt(d.client.UserID, 10)
	form := url.Values{}
	form.Set("appid", "0")
	form.Set("appversion", appVer)
	form.Set("userid", userID)
	form.Set("filename", fileName)
	form.Set("filesize", fileSizeStr)
	form.Set("fileid", fileID)
	form.Set("target", target)
	form.Set("sig", d.client.GenerateSignature(fileID, target))

	signKey, signVal := "", ""
	for retry := true; retry; {
		t := driver115.NowMilli()

		if encodedToken, err = ecdhCipher.EncodeToken(t.ToInt64()); err != nil {
			return nil, err
		}

		params := map[string]string{
			"k_ec": encodedToken,
		}

		form.Set("t", t.String())
		form.Set("token", d.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
		if signKey != "" && signVal != "" {
			form.Set("sign_key", signKey)
			form.Set("sign_val", signVal)
		}
		if encrypted, err = ecdhCipher.Encrypt([]byte(form.Encode())); err != nil {
			return nil, err
		}

		req := d.client.NewRequest().
			SetQueryParams(params).
			SetBody(encrypted).
			SetHeaderVerbatim("Content-Type", "application/x-www-form-urlencoded").
			SetDoNotParseResponse(true)
		resp, err := req.Post(driver115.ApiUploadInit)
		if err != nil {
			return nil, err
		}
		data := resp.RawBody()
		defer data.Close()
		if bodyBytes, err = io.ReadAll(data); err != nil {
			return nil, err
		}
		if decrypted, err = ecdhCipher.Decrypt(bodyBytes); err != nil {
			return nil, err
		}
		if err = driver115.CheckErr(json.Unmarshal(decrypted, &result), &result, resp); err != nil {
			return nil, err
		}
		if result.Status == 7 {
			// Update signKey & signVal
			signKey = result.SignKey
			signVal, err = UploadDigestRange(stream, result.SignCheck)
			if err != nil {
				return nil, err
			}
		} else {
			retry = false
		}
		result.SHA1 = fileID
	}

	return &result, nil
}

func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result string, err error) {
	var start, end int64
	if _, err = fmt.Sscanf(rangeSpec, "%d-%d", &start, &end); err != nil {
		return
	}

	length := end - start + 1
	reader, err := stream.RangeRead(http_range.Range{Start: start, Length: length})
	if err != nil {
		return "", err
	}
	hashStr, err := utils.HashReader(utils.SHA1, reader)
	if err != nil {
		return "", err
	}
	result = strings.ToUpper(hashStr)
	return
}

// UploadByOSS use aliyun sdk to upload
func (c *Pan115) UploadByOSS(ctx context.Context, params *driver115.UploadOSSParams, s model.FileStreamer, dirID string, up driver.UpdateProgress) (*UploadResult, error) {
	ossToken, err := c.client.GetOSSToken()
	if err != nil {
		return nil, err
	}
	ossClient, err := oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret)
	if err != nil {
		return nil, err
	}
	bucket, err := ossClient.Bucket(params.Bucket)
	if err != nil {
		return nil, err
	}

	var bodyBytes []byte
	r := driver.NewLimitedUploadStream(ctx, &driver.ReaderUpdatingProgress{
		Reader:         s,
		UpdateProgress: up,
	})
	if err = bucket.PutObject(params.Object, r, append(
		driver115.OssOption(params, ossToken),
		oss.CallbackResult(&bodyBytes),
	)...); err != nil {
		return nil, err
	}

	var uploadResult UploadResult
	if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
		return nil, err
	}
	return &uploadResult, uploadResult.Err(string(bodyBytes))
}

// UploadByMultipart uploads by multipart blocks
func (d *Pan115) UploadByMultipart(ctx context.Context, params *driver115.UploadOSSParams, fileSize int64, s model.FileStreamer,
	dirID string, up driver.UpdateProgress, opts ...driver115.UploadMultipartOption) (*UploadResult, error) {
	var (
		chunks    []oss.FileChunk
		parts     []oss.UploadPart
		imur      oss.InitiateMultipartUploadResult
		ossClient *oss.Client
		bucket    *oss.Bucket
		ossToken  *driver115.UploadOSSTokenResp
		bodyBytes []byte
		err       error
	)

	tmpF, err := s.CacheFullInTempFile()
	if err != nil {
		return nil, err
	}

	options := driver115.DefalutUploadMultipartOptions()
	if len(opts) > 0 {
		for _, f := range opts {
			f(options)
		}
	}
	// with Sequential enabled on the OSS side, parts must be uploaded in order
	options.ThreadsNum = 1

	if ossToken, err = d.client.GetOSSToken(); err != nil {
		return nil, err
	}

	if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret, oss.EnableMD5(true), oss.EnableCRC(true)); err != nil {
		return nil, err
	}

	if bucket, err = ossClient.Bucket(params.Bucket); err != nil {
		return nil, err
	}

	// the OSS token expires after one hour, so refresh it every 50 minutes
	ticker := time.NewTicker(options.TokenRefreshTime)
	defer ticker.Stop()
	// set the overall timeout
	timeout := time.NewTimer(options.Timeout)

	if chunks, err = SplitFile(fileSize); err != nil {
		return nil, err
	}

	if imur, err = bucket.InitiateMultipartUpload(params.Object,
		oss.SetHeader(driver115.OssSecurityTokenHeaderName, ossToken.SecurityToken),
		oss.UserAgentHeader(driver115.OSSUserAgent),
		oss.EnableSha1(), oss.Sequential(),
	); err != nil {
		return nil, err
	}

	wg := sync.WaitGroup{}
	wg.Add(len(chunks))

	chunksCh := make(chan oss.FileChunk)
	errCh := make(chan error)
	UploadedPartsCh := make(chan oss.UploadPart)
	quit := make(chan struct{})

	// producer
	go chunksProducer(chunksCh, chunks)
	go func() {
		wg.Wait()
		quit <- struct{}{}
	}()

	completedNum := atomic.Int32{}
	// consumers
	for i := 0; i < options.ThreadsNum; i++ {
		go func(threadId int) {
			defer func() {
				if r := recover(); r != nil {
					errCh <- fmt.Errorf("recovered in %v", r)
				}
			}()
			for chunk := range chunksCh {
				var part oss.UploadPart // retry on error, up to 3 attempts in total
				for retry := 0; retry < 3; retry++ {
					select {
					case <-ctx.Done():
						break
					case <-ticker.C:
						if ossToken, err = d.client.GetOSSToken(); err != nil { // refresh the OSS token when the ticker fires
							errCh <- errors.Wrap(err, "failed to refresh the OSS token")
						}
					default:
					}
					buf := make([]byte, chunk.Size)
					if _, err = tmpF.ReadAt(buf, chunk.Offset); err != nil && !errors.Is(err, io.EOF) {
						continue
					}
					if part, err = bucket.UploadPart(imur, driver.NewLimitedUploadStream(ctx, bytes.NewReader(buf)),
						chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
						break
					}
				}
				if err != nil {
					errCh <- errors.Wrap(err, fmt.Sprintf("error while uploading part %d of %s: %v", chunk.Number, s.GetName(), err))
				} else {
					num := completedNum.Add(1)
					up(float64(num) * 100.0 / float64(len(chunks)))
				}
				UploadedPartsCh <- part
			}
		}(i)
	}

	go func() {
		for part := range UploadedPartsCh {
			parts = append(parts, part)
			wg.Done()
		}
	}()
LOOP:
	for {
		select {
		case <-ticker.C:
			// refresh the OSS token when the ticker fires
			if ossToken, err = d.client.GetOSSToken(); err != nil {
				return nil, err
			}
		case <-quit:
			break LOOP
		case <-errCh:
			return nil, err
		case <-timeout.C:
			return nil, fmt.Errorf("time out")
		}
	}

	// for an unknown reason, OSS does not compute the sha1 for multipart uploads, which makes the 115 server-side check fail
	// params.Callback.Callback = strings.ReplaceAll(params.Callback.Callback, "${sha1}", params.SHA1)
	if _, err := bucket.CompleteMultipartUpload(imur, parts, append(
		driver115.OssOption(params, ossToken),
		oss.CallbackResult(&bodyBytes),
	)...); err != nil {
		return nil, err
	}

	var uploadResult UploadResult
	if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
		return nil, err
	}
	return &uploadResult, uploadResult.Err(string(bodyBytes))
}

func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
	for _, chunk := range chunks {
		ch <- chunk
	}
}

func SplitFile(fileSize int64) (chunks []oss.FileChunk, err error) {
	for i := int64(1); i < 10; i++ {
		if fileSize < i*utils.GB { // a file smaller than i GB is split into i*1000 parts
			if chunks, err = SplitFileByPartNum(fileSize, int(i*1000)); err != nil {
				return
			}
			break
		}
	}
	if fileSize > 9*utils.GB { // a file larger than 9 GB is split into 10000 parts
		if chunks, err = SplitFileByPartNum(fileSize, 10000); err != nil {
			return
		}
	}
	// a single part must be at least 100KB
	if chunks[0].Size < 100*utils.KB {
		if chunks, err = SplitFileByPartSize(fileSize, 100*utils.KB); err != nil {
			return
		}
	}
	return
}

// SplitFileByPartNum splits big file into parts by the num of parts.
// Split the file with specified parts count, returns the split result when error is nil.
func SplitFileByPartNum(fileSize int64, chunkNum int) ([]oss.FileChunk, error) {
	if chunkNum <= 0 || chunkNum > 10000 {
		return nil, errors.New("chunkNum invalid")
	}

	if int64(chunkNum) > fileSize {
		return nil, errors.New("oss: chunkNum invalid")
	}

	var chunks []oss.FileChunk
	chunk := oss.FileChunk{}
	chunkN := (int64)(chunkNum)
	for i := int64(0); i < chunkN; i++ {
		chunk.Number = int(i + 1)
		chunk.Offset = i * (fileSize / chunkN)
		if i == chunkN-1 {
			chunk.Size = fileSize/chunkN + fileSize%chunkN
		} else {
			chunk.Size = fileSize / chunkN
		}
		chunks = append(chunks, chunk)
	}

	return chunks, nil
}

// SplitFileByPartSize splits big file into parts by the size of parts.
// Splits the file by the part size. Returns the FileChunk when error is nil.
func SplitFileByPartSize(fileSize int64, chunkSize int64) ([]oss.FileChunk, error) {
	if chunkSize <= 0 {
		return nil, errors.New("chunkSize invalid")
	}

	chunkN := fileSize / chunkSize
	if chunkN >= 10000 {
		return nil, errors.New("Too many parts, please increase part size")
	}

	var chunks []oss.FileChunk
	chunk := oss.FileChunk{}
	for i := int64(0); i < chunkN; i++ {
		chunk.Number = int(i + 1)
		chunk.Offset = i * chunkSize
		chunk.Size = chunkSize
		chunks = append(chunks, chunk)
	}

	if fileSize%chunkSize > 0 {
		chunk.Number = len(chunks) + 1
		chunk.Offset = int64(len(chunks)) * chunkSize
		chunk.Size = fileSize % chunkSize
		chunks = append(chunks, chunk)
	}

	return chunks, nil
}
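SplitFile above derives the part count from the file size (i*1000 parts for files under i GB, 10000 parts above 9 GB, with a 100KB floor on the part size). A standalone sketch of the same even-split arithmetic, kept free of the OSS types so it can run on its own; the sample sizes are arbitrary:

package main

import "fmt"

const (
	kb int64 = 1 << 10
	gb int64 = 1 << 30
)

// splitByPartNum mirrors the even-split rule: every part gets size/n bytes
// and the last part absorbs the remainder.
func splitByPartNum(fileSize, n int64) (partSize, lastPart int64) {
	partSize = fileSize / n
	lastPart = partSize + fileSize%n
	return
}

func main() {
	for _, size := range []int64{500 * (1 << 20), 3 * gb, 12 * gb} {
		// Same selection rule as SplitFile: i*1000 parts for files under i GB, else 10000 parts.
		var n int64 = 10000
		for i := int64(1); i < 10; i++ {
			if size < i*gb {
				n = i * 1000
				break
			}
		}
		part, last := splitByPartNum(size, n)
		fmt.Printf("size=%d bytes -> %d parts, part=%d bytes, last=%d bytes (minimum allowed %d)\n",
			size, n, part, last, 100*kb)
	}
}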
drivers/115_open/driver.go (new file, 331 lines)
@@ -0,0 +1,331 @@
package _115_open

import (
	"context"
	"fmt"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/OpenListTeam/OpenList/v4/cmd/flags"
	"github.com/OpenListTeam/OpenList/v4/drivers/base"
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/internal/op"
	"github.com/OpenListTeam/OpenList/v4/internal/stream"
	"github.com/OpenListTeam/OpenList/v4/pkg/http_range"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	sdk "github.com/OpenListTeam/115-sdk-go"
	"golang.org/x/time/rate"
)

type Open115 struct {
	model.Storage
	Addition
	client  *sdk.Client
	limiter *rate.Limiter
}

func (d *Open115) Config() driver.Config {
	return config
}

func (d *Open115) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *Open115) Init(ctx context.Context) error {
	d.client = sdk.New(sdk.WithRefreshToken(d.Addition.RefreshToken),
		sdk.WithAccessToken(d.Addition.AccessToken),
		sdk.WithOnRefreshToken(func(s1, s2 string) {
			d.Addition.AccessToken = s1
			d.Addition.RefreshToken = s2
			op.MustSaveDriverStorage(d)
		}))
	if flags.Debug || flags.Dev {
		d.client.SetDebug(true)
	}
	_, err := d.client.UserInfo(ctx)
	if err != nil {
		return err
	}
	if d.Addition.LimitRate > 0 {
		d.limiter = rate.NewLimiter(rate.Limit(d.Addition.LimitRate), 1)
	}
	return nil
}

func (d *Open115) WaitLimit(ctx context.Context) error {
	if d.limiter != nil {
		return d.limiter.Wait(ctx)
	}
	return nil
}

func (d *Open115) Drop(ctx context.Context) error {
	return nil
}

func (d *Open115) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	var res []model.Obj
	pageSize := int64(200)
	offset := int64(0)
	for {
		if err := d.WaitLimit(ctx); err != nil {
			return nil, err
		}
		resp, err := d.client.GetFiles(ctx, &sdk.GetFilesReq{
			CID:    dir.GetID(),
			Limit:  pageSize,
			Offset: offset,
			ASC:    d.Addition.OrderDirection == "asc",
			O:      d.Addition.OrderBy,
			// Cur:     1,
			ShowDir: true,
		})
		if err != nil {
			return nil, err
		}
		res = append(res, utils.MustSliceConvert(resp.Data, func(src sdk.GetFilesResp_File) model.Obj {
			obj := Obj(src)
			return &obj
		})...)
		if len(res) >= int(resp.Count) {
			break
		}
		offset += pageSize
	}
	return res, nil
}

func (d *Open115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	var ua string
	if args.Header != nil {
		ua = args.Header.Get("User-Agent")
	}
	if ua == "" {
		ua = base.UserAgent
	}
	obj, ok := file.(*Obj)
	if !ok {
		return nil, fmt.Errorf("can't convert obj")
	}
	pc := obj.Pc
	resp, err := d.client.DownURL(ctx, pc, ua)
	if err != nil {
		return nil, err
	}
	u, ok := resp[obj.GetID()]
	if !ok {
		return nil, fmt.Errorf("can't get link")
	}
	return &model.Link{
		URL: u.URL.URL,
		Header: http.Header{
			"User-Agent": []string{ua},
		},
	}, nil
}

func (d *Open115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	resp, err := d.client.Mkdir(ctx, parentDir.GetID(), dirName)
	if err != nil {
		return nil, err
	}
	return &Obj{
		Fid:  resp.FileID,
		Pid:  parentDir.GetID(),
		Fn:   dirName,
		Fc:   "0",
		Upt:  time.Now().Unix(),
		Uet:  time.Now().Unix(),
		UpPt: time.Now().Unix(),
	}, nil
}

func (d *Open115) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	_, err := d.client.Move(ctx, &sdk.MoveReq{
		FileIDs: srcObj.GetID(),
		ToCid:   dstDir.GetID(),
	})
	if err != nil {
		return nil, err
	}
	return srcObj, nil
}

func (d *Open115) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	_, err := d.client.UpdateFile(ctx, &sdk.UpdateFileReq{
		FileID:  srcObj.GetID(),
		FileNma: newName,
	})
	if err != nil {
		return nil, err
	}
	obj, ok := srcObj.(*Obj)
	if ok {
		obj.Fn = newName
	}
	return srcObj, nil
}

func (d *Open115) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	_, err := d.client.Copy(ctx, &sdk.CopyReq{
		PID:     dstDir.GetID(),
		FileID:  srcObj.GetID(),
		NoDupli: "1",
	})
	if err != nil {
		return nil, err
	}
	return srcObj, nil
}

func (d *Open115) Remove(ctx context.Context, obj model.Obj) error {
	if err := d.WaitLimit(ctx); err != nil {
		return err
	}
	_obj, ok := obj.(*Obj)
	if !ok {
		return fmt.Errorf("can't convert obj")
	}
	_, err := d.client.DelFile(ctx, &sdk.DelFileReq{
		FileIDs:  _obj.GetID(),
		ParentID: _obj.Pid,
	})
	if err != nil {
		return err
	}
	return nil
}

func (d *Open115) Put(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
	err := d.WaitLimit(ctx)
	if err != nil {
		return err
	}
	sha1 := file.GetHash().GetHash(utils.SHA1)
	if len(sha1) != utils.SHA1.Width {
		_, sha1, err = stream.CacheFullInTempFileAndHash(file, utils.SHA1)
		if err != nil {
			return err
		}
	}
	const PreHashSize int64 = 128 * utils.KB
	hashSize := PreHashSize
	if file.GetSize() < PreHashSize {
		hashSize = file.GetSize()
	}
	reader, err := file.RangeRead(http_range.Range{Start: 0, Length: hashSize})
	if err != nil {
		return err
	}
	sha1128k, err := utils.HashReader(utils.SHA1, reader)
	if err != nil {
		return err
	}
	// 1. Init
	resp, err := d.client.UploadInit(ctx, &sdk.UploadInitReq{
		FileName: file.GetName(),
		FileSize: file.GetSize(),
		Target:   dstDir.GetID(),
		FileID:   strings.ToUpper(sha1),
		PreID:    strings.ToUpper(sha1128k),
	})
	if err != nil {
		return err
	}
	if resp.Status == 2 {
		return nil
	}
	// 2. two way verify
	if utils.SliceContains([]int{6, 7, 8}, resp.Status) {
		signCheck := strings.Split(resp.SignCheck, "-") // "sign_check": "2392148-2392298" means the sha1 of the bytes from offset 2392148 to 2392298, both ends inclusive
		start, err := strconv.ParseInt(signCheck[0], 10, 64)
		if err != nil {
			return err
		}
		end, err := strconv.ParseInt(signCheck[1], 10, 64)
		if err != nil {
			return err
		}
		reader, err = file.RangeRead(http_range.Range{Start: start, Length: end - start + 1})
		if err != nil {
			return err
		}
		signVal, err := utils.HashReader(utils.SHA1, reader)
		if err != nil {
			return err
		}
		resp, err = d.client.UploadInit(ctx, &sdk.UploadInitReq{
			FileName: file.GetName(),
			FileSize: file.GetSize(),
			Target:   dstDir.GetID(),
			FileID:   strings.ToUpper(sha1),
			PreID:    strings.ToUpper(sha1128k),
			SignKey:  resp.SignKey,
			SignVal:  strings.ToUpper(signVal),
		})
		if err != nil {
			return err
		}
		if resp.Status == 2 {
			return nil
		}
	}
	// 3. get upload token
	tokenResp, err := d.client.UploadGetToken(ctx)
	if err != nil {
		return err
	}
	// 4. upload
	err = d.multpartUpload(ctx, file, up, tokenResp, resp)
	if err != nil {
		return err
	}
	return nil
}

// func (d *Open115) GetArchiveMeta(ctx context.Context, obj model.Obj, args model.ArchiveArgs) (model.ArchiveMeta, error) {
// 	// TODO get archive file meta-info, return errs.NotImplement to use an internal archive tool, optional
// 	return nil, errs.NotImplement
// }

// func (d *Open115) ListArchive(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) ([]model.Obj, error) {
// 	// TODO list args.InnerPath in the archive obj, return errs.NotImplement to use an internal archive tool, optional
// 	return nil, errs.NotImplement
// }

// func (d *Open115) Extract(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) (*model.Link, error) {
// 	// TODO return link of file args.InnerPath in the archive obj, return errs.NotImplement to use an internal archive tool, optional
// 	return nil, errs.NotImplement
// }

// func (d *Open115) ArchiveDecompress(ctx context.Context, srcObj, dstDir model.Obj, args model.ArchiveDecompressArgs) ([]model.Obj, error) {
// 	// TODO extract args.InnerPath path in the archive srcObj to the dstDir location, optional
// 	// a folder with the same name as the archive file needs to be created to store the extracted results if args.PutIntoNewDir
// 	// return errs.NotImplement to use an internal archive tool
// 	return nil, errs.NotImplement
// }

// func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// 	return nil, errs.NotSupport
// }

var _ driver.Driver = (*Open115)(nil)
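The Put flow above sends two SHA-1 values: a pre-hash over the first 128KB of the file and, when the server answers with a sign_check range such as "2392148-2392298", the SHA-1 of exactly that inclusive byte range. A self-contained sketch of both computations over an in-memory buffer (illustrative only; the real driver streams the data through RangeRead, and the range below is hypothetical):

package main

import (
	"crypto/sha1"
	"fmt"
	"strconv"
	"strings"
)

// rangeSHA1 hashes data[start..end] inclusive, the way a sign_check range is interpreted.
func rangeSHA1(data []byte, spec string) (string, error) {
	parts := strings.SplitN(spec, "-", 2)
	start, err := strconv.ParseInt(parts[0], 10, 64)
	if err != nil {
		return "", err
	}
	end, err := strconv.ParseInt(parts[1], 10, 64)
	if err != nil {
		return "", err
	}
	sum := sha1.Sum(data[start : end+1])
	return strings.ToUpper(fmt.Sprintf("%x", sum)), nil
}

func main() {
	data := make([]byte, 256*1024) // stand-in for the file content
	for i := range data {
		data[i] = byte(i)
	}

	// Pre-hash: SHA-1 of the first 128KB (or the whole file if it is smaller).
	preLen := 128 * 1024
	if len(data) < preLen {
		preLen = len(data)
	}
	pre := sha1.Sum(data[:preLen])
	fmt.Println("pre-hash:", strings.ToUpper(fmt.Sprintf("%x", pre)))

	// Hypothetical sign_check range; a real one comes back from UploadInit.
	if v, err := rangeSHA1(data, "1024-2047"); err == nil {
		fmt.Println("sign_val:", v)
	}
}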
drivers/115_open/meta.go (new file, 37 lines)
@@ -0,0 +1,37 @@
package _115_open

import (
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/op"
)

type Addition struct {
	// Usually one of two
	driver.RootID
	// define other
	OrderBy        string  `json:"order_by" type:"select" options:"file_name,file_size,user_utime,file_type"`
	OrderDirection string  `json:"order_direction" type:"select" options:"asc,desc"`
	LimitRate      float64 `json:"limit_rate,string" type:"float" default:"1" help:"limit all api request rate ([limit]r/1s)"`
	AccessToken    string  `json:"access_token" required:"true"`
	RefreshToken   string  `json:"refresh_token" required:"true"`
}

var config = driver.Config{
	Name:              "115 Open",
	LocalSort:         false,
	OnlyLocal:         false,
	OnlyProxy:         false,
	NoCache:           false,
	NoUpload:          false,
	NeedMs:            false,
	DefaultRoot:       "0",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: false,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Open115{}
	})
}
drivers/115_open/types.go (new file, 59 lines)
@@ -0,0 +1,59 @@
package _115_open

import (
	"time"

	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	sdk "github.com/OpenListTeam/115-sdk-go"
)

type Obj sdk.GetFilesResp_File

// Thumb implements model.Thumb.
func (o *Obj) Thumb() string {
	return o.Thumbnail
}

// CreateTime implements model.Obj.
func (o *Obj) CreateTime() time.Time {
	return time.Unix(o.UpPt, 0)
}

// GetHash implements model.Obj.
func (o *Obj) GetHash() utils.HashInfo {
	return utils.NewHashInfo(utils.SHA1, o.Sha1)
}

// GetID implements model.Obj.
func (o *Obj) GetID() string {
	return o.Fid
}

// GetName implements model.Obj.
func (o *Obj) GetName() string {
	return o.Fn
}

// GetPath implements model.Obj.
func (o *Obj) GetPath() string {
	return ""
}

// GetSize implements model.Obj.
func (o *Obj) GetSize() int64 {
	return o.FS
}

// IsDir implements model.Obj.
func (o *Obj) IsDir() bool {
	return o.Fc == "0"
}

// ModTime implements model.Obj.
func (o *Obj) ModTime() time.Time {
	return time.Unix(o.Upt, 0)
}

var _ model.Obj = (*Obj)(nil)
var _ model.Thumb = (*Obj)(nil)
drivers/115_open/upload.go (new file, 140 lines)
@@ -0,0 +1,140 @@
package _115_open

import (
	"context"
	"encoding/base64"
	"io"
	"time"

	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	"github.com/aliyun/aliyun-oss-go-sdk/oss"
	"github.com/avast/retry-go"

	sdk "github.com/OpenListTeam/115-sdk-go"
)

func calPartSize(fileSize int64) int64 {
	var partSize int64 = 20 * utils.MB
	if fileSize > partSize {
		if fileSize > 1*utils.TB { // file size over 1TB
			partSize = 5 * utils.GB // file part size 5GB
		} else if fileSize > 768*utils.GB { // over 768GB
			partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 parts
		} else if fileSize > 512*utils.GB { // over 512GB
			partSize = 82463373 // ≈ 78.6432MB
		} else if fileSize > 384*utils.GB { // over 384GB
			partSize = 54975582 // ≈ 52.4288MB
		} else if fileSize > 256*utils.GB { // over 256GB
			partSize = 41231687 // ≈ 39.3216MB
		} else if fileSize > 128*utils.GB { // over 128GB
			partSize = 27487791 // ≈ 26.2144MB
		}
	}
	return partSize
}

func (d *Open115) singleUpload(ctx context.Context, tempF model.File, tokenResp *sdk.UploadGetTokenResp, initResp *sdk.UploadInitResp) error {
	ossClient, err := oss.New(tokenResp.Endpoint, tokenResp.AccessKeyId, tokenResp.AccessKeySecret, oss.SecurityToken(tokenResp.SecurityToken))
	if err != nil {
		return err
	}
	bucket, err := ossClient.Bucket(initResp.Bucket)
	if err != nil {
		return err
	}

	err = bucket.PutObject(initResp.Object, tempF,
		oss.Callback(base64.StdEncoding.EncodeToString([]byte(initResp.Callback.Value.Callback))),
		oss.CallbackVar(base64.StdEncoding.EncodeToString([]byte(initResp.Callback.Value.CallbackVar))),
	)

	return err
}

// type CallbackResult struct {
// 	State   bool   `json:"state"`
// 	Code    int    `json:"code"`
// 	Message string `json:"message"`
// 	Data    struct {
// 		PickCode string `json:"pick_code"`
// 		FileName string `json:"file_name"`
// 		FileSize int64  `json:"file_size"`
// 		FileID   string `json:"file_id"`
// 		ThumbURL string `json:"thumb_url"`
// 		Sha1     string `json:"sha1"`
// 		Aid      int    `json:"aid"`
// 		Cid      string `json:"cid"`
// 	} `json:"data"`
// }

func (d *Open115) multpartUpload(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress, tokenResp *sdk.UploadGetTokenResp, initResp *sdk.UploadInitResp) error {
	fileSize := stream.GetSize()
	chunkSize := calPartSize(fileSize)

	ossClient, err := oss.New(tokenResp.Endpoint, tokenResp.AccessKeyId, tokenResp.AccessKeySecret, oss.SecurityToken(tokenResp.SecurityToken))
	if err != nil {
		return err
	}
	bucket, err := ossClient.Bucket(initResp.Bucket)
	if err != nil {
		return err
	}

	imur, err := bucket.InitiateMultipartUpload(initResp.Object, oss.Sequential())
	if err != nil {
		return err
	}

	partNum := (stream.GetSize() + chunkSize - 1) / chunkSize
	parts := make([]oss.UploadPart, partNum)
	offset := int64(0)
	for i := int64(1); i <= partNum; i++ {
		if utils.IsCanceled(ctx) {
			return ctx.Err()
		}

		partSize := chunkSize
		if i == partNum {
			partSize = fileSize - (i-1)*chunkSize
		}
		rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
		err = retry.Do(func() error {
			_ = rd.Reset()
			rateLimitedRd := driver.NewLimitedUploadStream(ctx, rd)
			part, err := bucket.UploadPart(imur, rateLimitedRd, partSize, int(i))
			if err != nil {
				return err
			}
			parts[i-1] = part
			return nil
		},
			retry.Attempts(3),
			retry.DelayType(retry.BackOffDelay),
			retry.Delay(time.Second))
		if err != nil {
			return err
		}

		if i == partNum {
			offset = fileSize
		} else {
			offset += partSize
		}
		up(float64(offset) / float64(fileSize))
	}

	// callbackRespBytes := make([]byte, 1024)
	_, err = bucket.CompleteMultipartUpload(
		imur,
		parts,
		oss.Callback(base64.StdEncoding.EncodeToString([]byte(initResp.Callback.Value.Callback))),
		oss.CallbackVar(base64.StdEncoding.EncodeToString([]byte(initResp.Callback.Value.CallbackVar))),
		// oss.CallbackResult(&callbackRespBytes),
	)
	if err != nil {
		return err
	}

	return nil
}
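calPartSize above starts from a 20MB part and bumps the part size at fixed thresholds so the part count stays below the OSS 10,000-part cap. A standalone restatement of those thresholds, handy for eyeballing the resulting part counts (the constants are copied from the function above; the sample sizes are arbitrary):

package main

import "fmt"

const (
	mb int64 = 1 << 20
	gb int64 = 1 << 30
	tb int64 = 1 << 40
)

// partSizeFor mirrors calPartSize: 20MB by default, larger for very big files.
func partSizeFor(fileSize int64) int64 {
	partSize := 20 * mb
	switch {
	case fileSize > 1*tb:
		partSize = 5 * gb
	case fileSize > 768*gb:
		partSize = 109951163
	case fileSize > 512*gb:
		partSize = 82463373
	case fileSize > 384*gb:
		partSize = 54975582
	case fileSize > 256*gb:
		partSize = 41231687
	case fileSize > 128*gb:
		partSize = 27487791
	}
	return partSize
}

func main() {
	for _, size := range []int64{10 * gb, 200 * gb, 2 * tb} {
		ps := partSizeFor(size)
		parts := (size + ps - 1) / ps
		fmt.Printf("file %d bytes -> part size %d bytes, %d parts\n", size, ps, parts)
	}
}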
drivers/115_open/util.go (new file, 3 lines)
@@ -0,0 +1,3 @@
package _115_open

// do others that not defined in Driver interface
drivers/115_share/driver.go (new file, 112 lines)
@@ -0,0 +1,112 @@
package _115_share

import (
	"context"

	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/errs"
	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	driver115 "github.com/SheltonZhu/115driver/pkg/driver"
	"golang.org/x/time/rate"
)

type Pan115Share struct {
	model.Storage
	Addition
	client  *driver115.Pan115Client
	limiter *rate.Limiter
}

func (d *Pan115Share) Config() driver.Config {
	return config
}

func (d *Pan115Share) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *Pan115Share) Init(ctx context.Context) error {
	if d.LimitRate > 0 {
		d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
	}

	return d.login()
}

func (d *Pan115Share) WaitLimit(ctx context.Context) error {
	if d.limiter != nil {
		return d.limiter.Wait(ctx)
	}
	return nil
}

func (d *Pan115Share) Drop(ctx context.Context) error {
	return nil
}

func (d *Pan115Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}

	files := make([]driver115.ShareFile, 0)
	fileResp, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, dir.GetID(), driver115.QueryLimit(int(d.PageSize)))
	if err != nil {
		return nil, err
	}
	files = append(files, fileResp.Data.List...)
	total := fileResp.Data.Count
	count := len(fileResp.Data.List)
	for total > count {
		fileResp, err := d.client.GetShareSnap(
			d.ShareCode, d.ReceiveCode, dir.GetID(),
			driver115.QueryLimit(int(d.PageSize)), driver115.QueryOffset(count),
		)
		if err != nil {
			return nil, err
		}
		files = append(files, fileResp.Data.List...)
		count += len(fileResp.Data.List)
	}

	return utils.SliceConvert(files, transFunc)
}

func (d *Pan115Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	if err := d.WaitLimit(ctx); err != nil {
		return nil, err
	}
	downloadInfo, err := d.client.DownloadByShareCode(d.ShareCode, d.ReceiveCode, file.GetID())
	if err != nil {
		return nil, err
	}

	return &model.Link{URL: downloadInfo.URL.URL}, nil
}

func (d *Pan115Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
	return errs.NotSupport
}

func (d *Pan115Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
	return errs.NotSupport
}

func (d *Pan115Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
	return errs.NotSupport
}

func (d *Pan115Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
	return errs.NotSupport
}

func (d *Pan115Share) Remove(ctx context.Context, obj model.Obj) error {
	return errs.NotSupport
}

func (d *Pan115Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	return errs.NotSupport
}

var _ driver.Driver = (*Pan115Share)(nil)
drivers/115_share/meta.go (new file, 34 lines)
@@ -0,0 +1,34 @@
package _115_share

import (
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/op"
)

type Addition struct {
	Cookie       string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
	QRCodeToken  string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
	QRCodeSource string  `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
	PageSize     int64   `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
	LimitRate    float64 `json:"limit_rate" type:"float" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
	ShareCode    string  `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
	ReceiveCode  string  `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
	driver.RootID
}

var config = driver.Config{
	Name:        "115 Share",
	DefaultRoot: "0",
	// OnlyProxy:   true,
	// OnlyLocal:   true,
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: true,
	NoUpload:          true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Pan115Share{}
	})
}
drivers/115_share/utils.go (new file, 111 lines)
@@ -0,0 +1,111 @@
package _115_share

import (
	"fmt"
	"strconv"
	"time"

	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	driver115 "github.com/SheltonZhu/115driver/pkg/driver"
	"github.com/pkg/errors"
)

var _ model.Obj = (*FileObj)(nil)

type FileObj struct {
	Size     int64
	Sha1     string
	Utm      time.Time
	FileName string
	isDir    bool
	FileID   string
}

func (f *FileObj) CreateTime() time.Time {
	return f.Utm
}

func (f *FileObj) GetHash() utils.HashInfo {
	return utils.NewHashInfo(utils.SHA1, f.Sha1)
}

func (f *FileObj) GetSize() int64 {
	return f.Size
}

func (f *FileObj) GetName() string {
	return f.FileName
}

func (f *FileObj) ModTime() time.Time {
	return f.Utm
}

func (f *FileObj) IsDir() bool {
	return f.isDir
}

func (f *FileObj) GetID() string {
	return f.FileID
}

func (f *FileObj) GetPath() string {
	return ""
}

func transFunc(sf driver115.ShareFile) (model.Obj, error) {
	timeInt, err := strconv.ParseInt(sf.UpdateTime, 10, 64)
	if err != nil {
		return nil, err
	}
	var (
		utm    = time.Unix(timeInt, 0)
		isDir  = (sf.IsFile == 0)
		fileID = string(sf.FileID)
	)
	if isDir {
		fileID = string(sf.CategoryID)
	}
	return &FileObj{
		Size:     int64(sf.Size),
		Sha1:     sf.Sha1,
		Utm:      utm,
		FileName: string(sf.FileName),
		isDir:    isDir,
		FileID:   fileID,
	}, nil
}

var UserAgent = driver115.UA115Browser

func (d *Pan115Share) login() error {
	var err error
	opts := []driver115.Option{
		driver115.UA(UserAgent),
	}
	d.client = driver115.New(opts...)
	if _, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, ""); err != nil {
		return errors.Wrap(err, "failed to get share snap")
	}
	cr := &driver115.Credential{}
	if d.QRCodeToken != "" {
		s := &driver115.QRCodeSession{
			UID: d.QRCodeToken,
		}
		if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
			return errors.Wrap(err, "failed to login by qrcode")
		}
		d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", cr.UID, cr.CID, cr.SEID, cr.KID)
		d.QRCodeToken = ""
	} else if d.Cookie != "" {
		if err = cr.FromCookie(d.Cookie); err != nil {
			return errors.Wrap(err, "failed to login by cookies")
		}
		d.client.ImportCredential(cr)
	} else {
		return errors.New("missing cookie or qrcode account")
	}

	return d.client.LoginCheck()
}
@@ -1,190 +0,0 @@
package _23

import (
	"crypto/hmac"
	"crypto/sha256"
	"errors"
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/drivers/base"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	"github.com/go-resty/resty/v2"
	jsoniter "github.com/json-iterator/go"
	"math/rand"
	"path/filepath"
	"strconv"
	"time"
)

var pan123Client = resty.New()

type Pan123TokenResp struct {
	Code int `json:"code"`
	Data struct {
		Token string `json:"token"`
	} `json:"data"`
	Message string `json:"message"`
}

type Pan123File struct {
	FileName  string     `json:"FileName"`
	Size      int64      `json:"Size"`
	UpdateAt  *time.Time `json:"UpdateAt"`
	FileId    int64      `json:"FileId"`
	Type      int        `json:"Type"`
	Etag      string     `json:"Etag"`
	S3KeyFlag string     `json:"S3KeyFlag"`
}

type Pan123Files struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
	Data    struct {
		InfoList []Pan123File `json:"InfoList"`
		Next     string       `json:"Next"`
	} `json:"data"`
}

type Pan123DownResp struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
	Data    struct {
		DownloadUrl string `json:"DownloadUrl"`
	} `json:"data"`
}

func (driver Pan123) Login(account *model.Account) error {
	var resp Pan123TokenResp
	_, err := pan123Client.R().
		SetResult(&resp).
		SetBody(base.Json{
			"passport": account.Username,
			"password": account.Password,
		}).Post("https://www.123pan.com/api/user/sign_in")
	if err != nil {
		return err
	}
	if resp.Code != 200 {
		err = fmt.Errorf(resp.Message)
		account.Status = resp.Message
	} else {
		account.Status = "work"
		account.AccessToken = resp.Data.Token
	}
	_ = model.SaveAccount(account)
	return err
}

func (driver Pan123) FormatFile(file *Pan123File) *model.File {
	f := &model.File{
		Id:        strconv.FormatInt(file.FileId, 10),
		Name:      file.FileName,
		Size:      file.Size,
		Driver:    driver.Config().Name,
		UpdatedAt: file.UpdateAt,
	}
	if file.Type == 1 {
		f.Type = conf.FOLDER
	} else {
		f.Type = utils.GetFileType(filepath.Ext(file.FileName))
	}
	return f
}

func (driver Pan123) GetFiles(parentId string, account *model.Account) ([]Pan123File, error) {
	next := "0"
	res := make([]Pan123File, 0)
	for next != "-1" {
		var resp Pan123Files
		_, err := pan123Client.R().SetResult(&resp).
			SetHeader("authorization", "Bearer "+account.AccessToken).
			SetQueryParams(map[string]string{
				"driveId":        "0",
				"limit":          "100",
				"next":           next,
				"orderBy":        account.OrderBy,
				"orderDirection": account.OrderDirection,
				"parentFileId":   parentId,
				"trashed":        "false",
			}).Get("https://www.123pan.com/api/file/list")
		if err != nil {
			return nil, err
		}
		if resp.Code != 0 {
			if resp.Code == 401 {
				err := driver.Login(account)
				if err != nil {
					return nil, err
				}
				return driver.GetFiles(parentId, account)
			}
			return nil, fmt.Errorf(resp.Message)
		}
		next = resp.Data.Next
		res = append(res, resp.Data.InfoList...)
	}
	return res, nil
}

func (driver Pan123) Post(url string, data base.Json, account *model.Account) ([]byte, error) {
	res, err := pan123Client.R().
		SetHeader("authorization", "Bearer "+account.AccessToken).
		SetBody(data).Post(url)
	if err != nil {
		return nil, err
	}
	body := res.Body()
	if jsoniter.Get(body, "code").ToInt() != 0 {
		return nil, errors.New(jsoniter.Get(body, "message").ToString())
	}
	return body, nil
}

func (driver Pan123) GetFile(path string, account *model.Account) (*Pan123File, error) {
	dir, name := filepath.Split(path)
	dir = utils.ParsePath(dir)
	_, err := driver.Files(dir, account)
	if err != nil {
		return nil, err
	}
	parentFiles_, _ := base.GetCache(dir, account)
	parentFiles, _ := parentFiles_.([]Pan123File)
	for _, file := range parentFiles {
		if file.FileName == name {
			if file.Type != conf.FOLDER {
				return &file, err
			} else {
				return nil, base.ErrNotFile
			}
		}
	}
	return nil, base.ErrPathNotFound
}

func RandStr(length int) string {
	str := "123456789abcdefghijklmnopqrstuvwxyz"
	bytes := []byte(str)
	var result []byte
	rand.Seed(time.Now().UnixNano() + int64(rand.Intn(100)))
	for i := 0; i < length; i++ {
		result = append(result, bytes[rand.Intn(len(bytes))])
	}
	return string(result)
}

func HMAC(message string, secret string) string {
	key := []byte(secret)
	h := hmac.New(sha256.New, key)
	h.Write([]byte(message))
	// fmt.Println(h.Sum(nil))
	//sha := hex.EncodeToString(h.Sum(nil))
	// fmt.Println(sha)
	//return sha
	return string(h.Sum(nil))
}

func init() {
	base.RegisterDriver(&Pan123{})
	pan123Client.SetRetryCount(3)
}
@@ -1,349 +1,263 @@
package _23
package _123

import (
	"encoding/hex"
	"encoding/xml"
	"context"
	"encoding/base64"
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/drivers/base"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	"github.com/gin-gonic/gin"
	jsoniter "github.com/json-iterator/go"
	log "github.com/sirupsen/logrus"
	url "net/url"
	"path/filepath"
	"strconv"
	"net/http"
	"net/url"
	"strings"
	"sync"
	"time"

	"golang.org/x/time/rate"

	"github.com/OpenListTeam/OpenList/v4/drivers/base"
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/errs"
	"github.com/OpenListTeam/OpenList/v4/internal/model"
	"github.com/OpenListTeam/OpenList/v4/internal/stream"
	"github.com/OpenListTeam/OpenList/v4/pkg/utils"
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3/s3manager"
	"github.com/go-resty/resty/v2"
	log "github.com/sirupsen/logrus"
)

type Pan123 struct{}

func (driver Pan123) Config() base.DriverConfig {
	return base.DriverConfig{
		Name:      "123Pan",
		OnlyProxy: false,
	}
type Pan123 struct {
	model.Storage
	Addition
	apiRateLimit sync.Map
}

func (driver Pan123) Items() []base.Item {
	return []base.Item{
		{
			Name:        "username",
			Label:       "username",
			Type:        base.TypeString,
			Required:    true,
			Description: "account username/phone number",
		},
		{
			Name:        "password",
			Label:       "password",
			Type:        base.TypeString,
			Required:    true,
			Description: "account password",
		},
		{
			Name:     "root_folder",
			Label:    "root folder file_id",
			Type:     base.TypeString,
			Required: false,
		},
		{
			Name:     "order_by",
			Label:    "order_by",
			Type:     base.TypeSelect,
			Values:   "name,fileId,updateAt,createAt",
			Required: true,
		},
		{
			Name:     "order_direction",
			Label:    "order_direction",
			Type:     base.TypeSelect,
			Values:   "asc,desc",
			Required: true,
		},
	}
func (d *Pan123) Config() driver.Config {
	return config
}

func (driver Pan123) Save(account *model.Account, old *model.Account) error {
	if account.RootFolder == "" {
		account.RootFolder = "0"
	}
	err := driver.Login(account)
func (d *Pan123) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *Pan123) Init(ctx context.Context) error {
	_, err := d.Request(UserInfo, http.MethodGet, nil, nil)
	return err
}

func (driver Pan123) File(path string, account *model.Account) (*model.File, error) {
	path = utils.ParsePath(path)
	if path == "/" {
		return &model.File{
			Id:        account.RootFolder,
			Name:      account.Name,
			Size:      0,
			Type:      conf.FOLDER,
			Driver:    driver.Config().Name,
			UpdatedAt: account.UpdatedAt,
		}, nil
	}
	dir, name := filepath.Split(path)
	files, err := driver.Files(dir, account)
func (d *Pan123) Drop(ctx context.Context) error {
	_, _ = d.Request(Logout, http.MethodPost, func(req *resty.Request) {
		req.SetBody(base.Json{})
	}, nil)
	return nil
}

func (d *Pan123) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	files, err := d.getFiles(ctx, dir.GetID(), dir.GetName())
	if err != nil {
		return nil, err
	}
	for _, file := range files {
		if file.Name == name {
			return &file, nil
		}
	}
	return nil, base.ErrPathNotFound
	return utils.SliceConvert(files, func(src File) (model.Obj, error) {
		return src, nil
	})
}

func (driver Pan123) Files(path string, account *model.Account) ([]model.File, error) {
	path = utils.ParsePath(path)
	var rawFiles []Pan123File
	cache, err := base.GetCache(path, account)
	if err == nil {
		rawFiles, _ = cache.([]Pan123File)
	} else {
		file, err := driver.File(path, account)
		if err != nil {
			return nil, err
func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	if f, ok := file.(File); ok {
		//var resp DownResp
		var headers map[string]string
		if !utils.IsLocalIPAddr(args.IP) {
			headers = map[string]string{
				//"X-Real-IP": "1.1.1.1",
				"X-Forwarded-For": args.IP,
			}
		}
		rawFiles, err = driver.GetFiles(file.Id, account)
		if err != nil {
			return nil, err
		}
		if len(rawFiles) > 0 {
|
||||
_ = base.SetCache(path, rawFiles, account)
|
||||
}
|
||||
}
|
||||
files := make([]model.File, 0)
|
||||
for _, file := range rawFiles {
|
||||
files = append(files, *driver.FormatFile(&file))
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (driver Pan123) Link(path string, account *model.Account) (*base.Link, error) {
|
||||
file, err := driver.GetFile(utils.ParsePath(path), account)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var resp Pan123DownResp
|
||||
_, err = pan123Client.R().SetResult(&resp).SetHeader("authorization", "Bearer "+account.AccessToken).
|
||||
SetBody(base.Json{
|
||||
data := base.Json{
|
||||
"driveId": 0,
|
||||
"etag": file.Etag,
|
||||
"fileId": file.FileId,
|
||||
"fileName": file.FileName,
|
||||
"s3keyFlag": file.S3KeyFlag,
|
||||
"size": file.Size,
|
||||
"type": file.Type,
|
||||
}).Post("https://www.123pan.com/api/file/download_info")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.Code != 0 {
|
||||
if resp.Code == 401 {
|
||||
err := driver.Login(account)
|
||||
"etag": f.Etag,
|
||||
"fileId": f.FileId,
|
||||
"fileName": f.FileName,
|
||||
"s3keyFlag": f.S3KeyFlag,
|
||||
"size": f.Size,
|
||||
"type": f.Type,
|
||||
}
|
||||
resp, err := d.Request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
|
||||
|
||||
req.SetBody(data).SetHeaders(headers)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
downloadUrl := utils.Json.Get(resp, "data", "DownloadUrl").ToString()
|
||||
u, err := url.Parse(downloadUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nu := u.Query().Get("params")
|
||||
if nu != "" {
|
||||
du, _ := base64.StdEncoding.DecodeString(nu)
|
||||
u, err = url.Parse(string(du))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return driver.Link(path, account)
|
||||
}
|
||||
return nil, fmt.Errorf(resp.Message)
|
||||
}
|
||||
u, err := url.Parse(resp.Data.DownloadUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
u_ := fmt.Sprintf("https://%s%s", u.Host, u.Path)
|
||||
res, err := base.NoRedirectClient.R().SetQueryParamsFromValues(u.Query()).Get(u_)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debug(res.String())
|
||||
link := base.Link{}
|
||||
if res.StatusCode() == 302 {
|
||||
link.Url = res.Header().Get("location")
|
||||
} else {
|
||||
link.Url = resp.Data.DownloadUrl
|
||||
}
|
||||
return &link, nil
|
||||
}
|
||||
|
||||
func (driver Pan123) Path(path string, account *model.Account) (*model.File, []model.File, error) {
|
||||
path = utils.ParsePath(path)
|
||||
log.Debugf("pan123 path: %s", path)
|
||||
file, err := driver.File(path, account)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if !file.IsDir() {
|
||||
link, err := driver.Link(path, account)
|
||||
u_ := u.String()
|
||||
log.Debug("download url: ", u_)
|
||||
res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return nil, err
|
||||
}
|
||||
file.Url = link.Url
|
||||
return file, nil, nil
|
||||
log.Debug(res.String())
|
||||
link := model.Link{
|
||||
URL: u_,
|
||||
}
|
||||
log.Debugln("res code: ", res.StatusCode())
|
||||
if res.StatusCode() == 302 {
|
||||
link.URL = res.Header().Get("location")
|
||||
} else if res.StatusCode() < 300 {
|
||||
link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
|
||||
}
|
||||
link.Header = http.Header{
|
||||
"Referer": []string{"https://www.123pan.com/"},
|
||||
}
|
||||
return &link, nil
|
||||
} else {
|
||||
return nil, fmt.Errorf("can't convert obj")
|
||||
}
|
||||
files, err := driver.Files(path, account)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return nil, files, nil
|
||||
}
|
||||
|
||||
func (driver Pan123) Proxy(c *gin.Context, account *model.Account) {
|
||||
c.Request.Header.Del("origin")
|
||||
}
|
||||
|
||||
func (driver Pan123) Preview(path string, account *model.Account) (interface{}, error) {
|
||||
return nil, base.ErrNotSupport
|
||||
}
|
||||
|
||||
func (driver Pan123) MakeDir(path string, account *model.Account) error {
|
||||
dir, name := filepath.Split(path)
|
||||
parentFile, err := driver.File(dir, account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !parentFile.IsDir() {
|
||||
return base.ErrNotFolder
|
||||
}
|
||||
parentFileId, _ := strconv.Atoi(parentFile.Id)
|
||||
func (d *Pan123) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
data := base.Json{
|
||||
"driveId": 0,
|
||||
"etag": "",
|
||||
"fileName": name,
|
||||
"parentFileId": parentFileId,
|
||||
"fileName": dirName,
|
||||
"parentFileId": parentDir.GetID(),
|
||||
"size": 0,
|
||||
"type": 1,
|
||||
}
|
||||
_, err = driver.Post("https://www.123pan.com/api/file/upload_request", data, account)
|
||||
if err == nil {
|
||||
_ = base.DeleteCache(dir, account)
|
||||
}
|
||||
_, err := d.Request(Mkdir, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (driver Pan123) Move(src string, dst string, account *model.Account) error {
|
||||
srcDir, _ := filepath.Split(src)
|
||||
dstDir, dstName := filepath.Split(dst)
|
||||
srcFile, err := driver.File(src, account)
|
||||
if err != nil {
|
||||
return err
|
||||
func (d *Pan123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
data := base.Json{
|
||||
"fileIdList": []base.Json{{"FileId": srcObj.GetID()}},
|
||||
"parentFileId": dstDir.GetID(),
|
||||
}
|
||||
fileId, _ := strconv.Atoi(srcFile.Id)
|
||||
// rename
|
||||
if srcDir == dstDir {
|
||||
_, err := d.Request(Move, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
data := base.Json{
|
||||
"driveId": 0,
|
||||
"fileId": srcObj.GetID(),
|
||||
"fileName": newName,
|
||||
}
|
||||
_, err := d.Request(Rename, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
|
||||
if f, ok := obj.(File); ok {
|
||||
data := base.Json{
|
||||
"driveId": 0,
|
||||
"fileId": fileId,
|
||||
"fileName": dstName,
|
||||
"driveId": 0,
|
||||
"operation": true,
|
||||
"fileTrashInfoList": []File{f},
|
||||
}
|
||||
_, err = driver.Post("https://www.123pan.com/api/file/rename", data, account)
|
||||
_, err := d.Request(Trash, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
} else {
|
||||
// move
|
||||
dstDirFile, err := driver.File(dstDir, account)
|
||||
return fmt.Errorf("can't convert obj")
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
|
||||
etag := file.GetHash().GetHash(utils.MD5)
|
||||
var err error
|
||||
if len(etag) < utils.MD5.Width {
|
||||
_, etag, err = stream.CacheFullInTempFileAndHash(file, utils.MD5)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
parentFileId, _ := strconv.Atoi(dstDirFile.Id)
|
||||
data := base.Json{
|
||||
"fileId": fileId,
|
||||
"parentFileId": parentFileId,
|
||||
}
|
||||
_, err = driver.Post("https://www.123pan.com/api/file/mod_pid", data, account)
|
||||
}
|
||||
if err != nil {
|
||||
_ = base.DeleteCache(srcDir, account)
|
||||
_ = base.DeleteCache(dstDir, account)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (driver Pan123) Copy(src string, dst string, account *model.Account) error {
|
||||
return base.ErrNotSupport
|
||||
}
|
||||
|
||||
func (driver Pan123) Delete(path string, account *model.Account) error {
|
||||
file, err := driver.GetFile(path, account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
data := base.Json{
|
||||
"driveId": 0,
|
||||
"operation": true,
|
||||
"fileTrashInfoList": file,
|
||||
}
|
||||
_, err = driver.Post("https://www.123pan.com/api/file/trash", data, account)
|
||||
if err == nil {
|
||||
_ = base.DeleteCache(utils.Dir(path), account)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
type UploadResp struct {
|
||||
XMLName xml.Name `xml:"InitiateMultipartUploadResult"`
|
||||
Bucket string `xml:"Bucket"`
|
||||
Key string `xml:"Key"`
|
||||
UploadId string `xml:"UploadId"`
|
||||
}
|
||||
|
||||
// TODO unfinished
|
||||
func (driver Pan123) Upload(file *model.FileStream, account *model.Account) error {
|
||||
parentFile, err := driver.File(file.ParentPath, account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !parentFile.IsDir() {
|
||||
return base.ErrNotFolder
|
||||
}
|
||||
parentFileId, _ := strconv.Atoi(parentFile.Id)
|
||||
data := base.Json{
|
||||
"driveId": 0,
|
||||
"duplicate": true,
|
||||
"etag": RandStr(32), //maybe file's md5
|
||||
"fileName": file.GetFileName(),
|
||||
"parentFileId": parentFileId,
"duplicate": 2, // 2 -> overwrite, 1 -> rename, 0 -> default
|
||||
"etag": strings.ToLower(etag),
|
||||
"fileName": file.GetName(),
|
||||
"parentFileId": dstDir.GetID(),
|
||||
"size": file.GetSize(),
|
||||
"type": 0,
|
||||
}
|
||||
res, err := driver.Post("https://www.123pan.com/api/file/upload_request", data, account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
baseUrl := fmt.Sprintf("https://file.123pan.com/%s/%s", jsoniter.Get(res, "data.Bucket").ToString(), jsoniter.Get(res, "data.Key").ToString())
|
||||
var resp UploadResp
|
||||
kSecret := jsoniter.Get(res, "data.SecretAccessKey").ToString()
|
||||
nowTimeStr := time.Now().String()
|
||||
Date := strings.ReplaceAll(strings.Split(nowTimeStr, "T")[0],"-","")
|
||||
|
||||
StringToSign := fmt.Sprintf("%s\n%s\n%s\n%s",
|
||||
"AWS4-HMAC-SHA256",
|
||||
nowTimeStr,
|
||||
fmt.Sprintf("%s/us-east-1/s3/aws4_request", Date),
|
||||
)
|
||||
|
||||
kDate := HMAC("AWS4"+kSecret, Date)
|
||||
kRegion := HMAC(kDate, "us-east-1")
|
||||
kService := HMAC(kRegion, "s3")
|
||||
kSigning := HMAC(kService, "aws4_request")
|
||||
_, err = pan123Client.R().SetResult(&resp).SetHeaders(map[string]string{
|
||||
"Authorization": fmt.Sprintf("AWS4-HMAC-SHA256 Credential=%s/%s/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token;x-amz-user-agent, Signature=%s",
|
||||
jsoniter.Get(res, "data.AccessKeyId"),
|
||||
Date,
|
||||
hex.EncodeToString([]byte(HMAC(StringToSign, kSigning)))),
|
||||
"X-Amz-Content-Sha256": "UNSIGNED-PAYLOAD",
|
||||
"X-Amz-Date": nowTimeStr,
|
||||
"x-amz-security-token": jsoniter.Get(res, "data.SessionToken").ToString(),
|
||||
}).Post(fmt.Sprintf("%s?uploads", baseUrl))
|
||||
res, err := d.Request(UploadRequest, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return base.ErrNotImplement
|
||||
log.Debugln("upload request res: ", string(res))
|
||||
if resp.Data.Reuse || resp.Data.Key == "" {
|
||||
return nil
|
||||
}
|
||||
if resp.Data.AccessKeyId == "" || resp.Data.SecretAccessKey == "" || resp.Data.SessionToken == "" {
|
||||
err = d.newUpload(ctx, &resp, file, up)
|
||||
return err
|
||||
} else {
|
||||
cfg := &aws.Config{
|
||||
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),
|
||||
Region: aws.String("123pan"),
|
||||
Endpoint: aws.String(resp.Data.EndPoint),
|
||||
S3ForcePathStyle: aws.Bool(true),
|
||||
}
|
||||
s, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uploader := s3manager.NewUploader(s)
|
||||
if file.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||
uploader.PartSize = file.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||
}
|
||||
input := &s3manager.UploadInput{
|
||||
Bucket: &resp.Data.Bucket,
|
||||
Key: &resp.Data.Key,
|
||||
Body: driver.NewLimitedUploadStream(ctx, &driver.ReaderUpdatingProgress{
|
||||
Reader: file,
|
||||
UpdateProgress: up,
|
||||
}),
|
||||
}
|
||||
_, err = uploader.UploadWithContext(ctx, input)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
_, err = d.Request(UploadComplete, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"fileId": resp.Data.FileId,
|
||||
}).SetContext(ctx)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
var _ base.Driver = (*Pan123)(nil)
|
||||
func (d *Pan123) APIRateLimit(ctx context.Context, api string) error {
|
||||
value, _ := d.apiRateLimit.LoadOrStore(api,
|
||||
rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
|
||||
limiter := value.(*rate.Limiter)
|
||||
|
||||
return limiter.Wait(ctx)
|
||||
}
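// Illustrative sketch, not part of the driver: callers wait on the
// per-endpoint limiter before issuing a request, as getFiles does for
// FileList (roughly one call every 700ms per API URL). The endpoint and
// request below only stand in for whatever call the caller needs.
func exampleRateLimitedCall(ctx context.Context, d *Pan123) error {
	// blocks until the FileList limiter allows another call or ctx is canceled
	if err := d.APIRateLimit(ctx, FileList); err != nil {
		return err
	}
	_, err := d.Request(FileList, http.MethodGet, nil, nil)
	return err
}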
|
||||
|
||||
var _ driver.Driver = (*Pan123)(nil)
|
||||
|
||||
drivers/123/meta.go (new file)
@@ -0,0 +1,27 @@
package _123

import (
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/op"
)

type Addition struct {
	Username string `json:"username" required:"true"`
	Password string `json:"password" required:"true"`
	driver.RootID
	//OrderBy        string `json:"order_by" type:"select" options:"file_id,file_name,size,update_at" default:"file_name"`
	//OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
	AccessToken string
}

var config = driver.Config{
	Name:        "123Pan",
	DefaultRoot: "0",
	LocalSort:   true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Pan123{}
	})
}
drivers/123/types.go (new file)
@@ -0,0 +1,124 @@
|
||||
package _123
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
)
|
||||
|
||||
type File struct {
|
||||
FileName string `json:"FileName"`
|
||||
Size int64 `json:"Size"`
|
||||
UpdateAt time.Time `json:"UpdateAt"`
|
||||
FileId int64 `json:"FileId"`
|
||||
Type int `json:"Type"`
|
||||
Etag string `json:"Etag"`
|
||||
S3KeyFlag string `json:"S3KeyFlag"`
|
||||
DownloadUrl string `json:"DownloadUrl"`
|
||||
}
|
||||
|
||||
func (f File) CreateTime() time.Time {
|
||||
return f.UpdateAt
|
||||
}
|
||||
|
||||
func (f File) GetHash() utils.HashInfo {
|
||||
return utils.HashInfo{}
|
||||
}
|
||||
|
||||
func (f File) GetPath() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (f File) GetSize() int64 {
|
||||
return f.Size
|
||||
}
|
||||
|
||||
func (f File) GetName() string {
|
||||
return f.FileName
|
||||
}
|
||||
|
||||
func (f File) ModTime() time.Time {
|
||||
return f.UpdateAt
|
||||
}
|
||||
|
||||
func (f File) IsDir() bool {
|
||||
return f.Type == 1
|
||||
}
|
||||
|
||||
func (f File) GetID() string {
|
||||
return strconv.FormatInt(f.FileId, 10)
|
||||
}
|
||||
|
||||
func (f File) Thumb() string {
|
||||
if f.DownloadUrl == "" {
|
||||
return ""
|
||||
}
|
||||
du, err := url.Parse(f.DownloadUrl)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
|
||||
query := du.Query()
|
||||
query.Set("w", "70")
|
||||
query.Set("h", "70")
|
||||
if !query.Has("type") {
|
||||
query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
|
||||
}
|
||||
if !query.Has("trade_key") {
|
||||
query.Set("trade_key", "123pan-thumbnail")
|
||||
}
|
||||
du.RawQuery = query.Encode()
|
||||
return du.String()
|
||||
}
|
||||
|
||||
var _ model.Obj = (*File)(nil)
|
||||
var _ model.Thumb = (*File)(nil)
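// Illustrative sketch, not part of the driver: Thumb() turns DownloadUrl into
// a 70x70 thumbnail URL by swapping a trailing "_24_24" path segment for
// "_70_70" and appending w/h (plus type and trade_key when missing) query
// parameters. The URL and file name below are made up.
func exampleThumb() string {
	f := File{
		FileName:    "photo.jpg",
		DownloadUrl: "https://example.123pan.com/thumbnail/abc_24_24?e=1700000000",
	}
	return f.Thumb() // e.g. ".../thumbnail/abc_70_70?e=1700000000&h=70&...&w=70"
}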
|
||||
|
||||
//func (f File) Thumb() string {
|
||||
//
|
||||
//}
|
||||
//var _ model.Thumb = (*File)(nil)
|
||||
|
||||
type Files struct {
|
||||
//BaseResp
|
||||
Data struct {
|
||||
Next string `json:"Next"`
|
||||
Total int `json:"Total"`
|
||||
InfoList []File `json:"InfoList"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
//type DownResp struct {
|
||||
// //BaseResp
|
||||
// Data struct {
|
||||
// DownloadUrl string `json:"DownloadUrl"`
|
||||
// } `json:"data"`
|
||||
//}
|
||||
|
||||
type UploadResp struct {
|
||||
//BaseResp
|
||||
Data struct {
|
||||
AccessKeyId string `json:"AccessKeyId"`
|
||||
Bucket string `json:"Bucket"`
|
||||
Key string `json:"Key"`
|
||||
SecretAccessKey string `json:"SecretAccessKey"`
|
||||
SessionToken string `json:"SessionToken"`
|
||||
FileId int64 `json:"FileId"`
|
||||
Reuse bool `json:"Reuse"`
|
||||
EndPoint string `json:"EndPoint"`
|
||||
StorageNode string `json:"StorageNode"`
|
||||
UploadId string `json:"UploadId"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type S3PreSignedURLs struct {
|
||||
Data struct {
|
||||
PreSignedUrls map[string]string `json:"presignedUrls"`
|
||||
} `json:"data"`
|
||||
}
|
||||
drivers/123/upload.go (new file)
@@ -0,0 +1,162 @@
|
||||
package _123
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/drivers/base"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/driver"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
|
||||
data := base.Json{
|
||||
"bucket": upReq.Data.Bucket,
|
||||
"key": upReq.Data.Key,
|
||||
"partNumberEnd": end,
|
||||
"partNumberStart": start,
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
"StorageNode": upReq.Data.StorageNode,
|
||||
}
|
||||
var s3PreSignedUrls S3PreSignedURLs
|
||||
_, err := d.Request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &s3PreSignedUrls)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &s3PreSignedUrls, nil
|
||||
}
|
||||
|
||||
func (d *Pan123) getS3Auth(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
|
||||
data := base.Json{
|
||||
"StorageNode": upReq.Data.StorageNode,
|
||||
"bucket": upReq.Data.Bucket,
|
||||
"key": upReq.Data.Key,
|
||||
"partNumberEnd": end,
|
||||
"partNumberStart": start,
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
}
|
||||
var s3PreSignedUrls S3PreSignedURLs
|
||||
_, err := d.Request(S3Auth, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &s3PreSignedUrls)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &s3PreSignedUrls, nil
|
||||
}
|
||||
|
||||
func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp, file model.FileStreamer, isMultipart bool) error {
|
||||
data := base.Json{
|
||||
"StorageNode": upReq.Data.StorageNode,
|
||||
"bucket": upReq.Data.Bucket,
|
||||
"fileId": upReq.Data.FileId,
|
||||
"fileSize": file.GetSize(),
|
||||
"isMultipart": isMultipart,
|
||||
"key": upReq.Data.Key,
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
}
|
||||
_, err := d.Request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.FileStreamer, up driver.UpdateProgress) error {
|
||||
tmpF, err := file.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// fetch s3 pre signed urls
|
||||
size := file.GetSize()
|
||||
chunkSize := min(size, 16*utils.MB)
|
||||
chunkCount := int(size / chunkSize)
|
||||
lastChunkSize := size % chunkSize
|
||||
if lastChunkSize > 0 {
|
||||
chunkCount++
|
||||
} else {
|
||||
lastChunkSize = chunkSize
|
||||
}
|
||||
// only 1 batch is allowed
|
||||
batchSize := 1
|
||||
getS3UploadUrl := d.getS3Auth
|
||||
if chunkCount > 1 {
|
||||
batchSize = 10
|
||||
getS3UploadUrl = d.getS3PreSignedUrls
|
||||
}
|
||||
for i := 1; i <= chunkCount; i += batchSize {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
start := i
|
||||
end := min(i+batchSize, chunkCount+1)
|
||||
s3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, start, end)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// upload each chunk
|
||||
for j := start; j < end; j++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
curSize := chunkSize
|
||||
if j == chunkCount {
|
||||
curSize = lastChunkSize
|
||||
}
|
||||
err = d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, j, end, io.NewSectionReader(tmpF, chunkSize*int64(j-1), curSize), curSize, false, getS3UploadUrl)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
up(float64(j) * 100 / float64(chunkCount))
|
||||
}
|
||||
}
|
||||
// complete s3 upload
|
||||
return d.completeS3(ctx, upReq, file, chunkCount > 1)
|
||||
}
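// Illustrative sketch, not part of the driver: a worked example of the chunk
// plan computed above, for a made-up 40MB file. Multi-chunk uploads fetch
// pre-signed URLs in batches of 10 via getS3PreSignedUrls; a single chunk
// goes through getS3Auth instead.
func exampleChunkPlan() (chunkCount, lastChunkSize int64) {
	size := int64(40 * utils.MB)        // hypothetical file size
	chunkSize := min(size, 16*utils.MB) // 16MB
	chunkCount = size / chunkSize       // 2
	lastChunkSize = size % chunkSize    // 8MB
	if lastChunkSize > 0 {
		chunkCount++ // 3 chunks: 16MB + 16MB + 8MB
	} else {
		lastChunkSize = chunkSize
	}
	return chunkCount, lastChunkSize
}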
|
||||
|
||||
func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSignedUrls *S3PreSignedURLs, cur, end int, reader *io.SectionReader, curSize int64, retry bool, getS3UploadUrl func(ctx context.Context, upReq *UploadResp, start int, end int) (*S3PreSignedURLs, error)) error {
|
||||
uploadUrl := s3PreSignedUrls.Data.PreSignedUrls[strconv.Itoa(cur)]
|
||||
if uploadUrl == "" {
|
||||
return fmt.Errorf("upload url is empty, s3PreSignedUrls: %+v", s3PreSignedUrls)
|
||||
}
|
||||
req, err := http.NewRequest("PUT", uploadUrl, driver.NewLimitedUploadStream(ctx, reader))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.ContentLength = curSize
|
||||
//req.Header.Set("Content-Length", strconv.FormatInt(curSize, 10))
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode == http.StatusForbidden {
|
||||
if retry {
|
||||
return fmt.Errorf("upload s3 chunk %d failed, status code: %d", cur, res.StatusCode)
|
||||
}
|
||||
// refresh s3 pre signed urls
|
||||
newS3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, cur, end)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s3PreSignedUrls.Data.PreSignedUrls = newS3PreSignedUrls.Data.PreSignedUrls
|
||||
// retry
|
||||
reader.Seek(0, io.SeekStart)
|
||||
return d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, cur, end, reader, curSize, true, getS3UploadUrl)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
body, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return fmt.Errorf("upload s3 chunk %d failed, status code: %d, body: %s", cur, res.StatusCode, body)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
drivers/123/util.go (new file)
@@ -0,0 +1,284 @@
|
||||
package _123
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash/crc32"
|
||||
"math"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/drivers/base"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
const (
|
||||
Api = "https://www.123pan.com/api"
|
||||
AApi = "https://www.123pan.com/a/api"
|
||||
BApi = "https://www.123pan.com/b/api"
|
||||
LoginApi = "https://login.123pan.com/api"
|
||||
MainApi = BApi
|
||||
SignIn = LoginApi + "/user/sign_in"
|
||||
Logout = MainApi + "/user/logout"
|
||||
UserInfo = MainApi + "/user/info"
|
||||
FileList = MainApi + "/file/list/new"
|
||||
DownloadInfo = MainApi + "/file/download_info"
|
||||
Mkdir = MainApi + "/file/upload_request"
|
||||
Move = MainApi + "/file/mod_pid"
|
||||
Rename = MainApi + "/file/rename"
|
||||
Trash = MainApi + "/file/trash"
|
||||
UploadRequest = MainApi + "/file/upload_request"
|
||||
UploadComplete = MainApi + "/file/upload_complete"
|
||||
S3PreSignedUrls = MainApi + "/file/s3_repare_upload_parts_batch"
|
||||
S3Auth = MainApi + "/file/s3_upload_object/auth"
|
||||
UploadCompleteV2 = MainApi + "/file/upload_complete/v2"
|
||||
S3Complete = MainApi + "/file/s3_complete_multipart_upload"
|
||||
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
|
||||
)
|
||||
|
||||
func signPath(path string, os string, version string) (k string, v string) {
|
||||
table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
|
||||
random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
|
||||
now := time.Now().In(time.FixedZone("CST", 8*3600))
|
||||
timestamp := fmt.Sprint(now.Unix())
|
||||
nowStr := []byte(now.Format("200601021504"))
|
||||
for i := 0; i < len(nowStr); i++ {
|
||||
nowStr[i] = table[nowStr[i]-48]
|
||||
}
|
||||
timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
|
||||
data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
|
||||
dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
|
||||
return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
|
||||
}
|
||||
|
||||
func GetApi(rawUrl string) string {
|
||||
u, _ := url.Parse(rawUrl)
|
||||
query := u.Query()
|
||||
query.Add(signPath(u.Path, "web", "3"))
|
||||
u.RawQuery = query.Encode()
|
||||
return u.String()
|
||||
}
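// Illustrative sketch, not part of the driver: every request goes through
// GetApi, which appends the time-based signature pair produced by signPath as
// a query parameter; this is how Request below builds its final URL. The
// values change on every call, so the comment only shows the shape.
func exampleSignedListUrl() string {
	return GetApi(FileList)
	// e.g. "https://www.123pan.com/b/api/file/list/new?<timeSign>=<timestamp>-<random>-<dataSign>"
}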
|
||||
|
||||
//func GetApi(url string) string {
|
||||
// vm := js.New()
|
||||
// vm.Set("url", url[22:])
|
||||
// r, err := vm.RunString(`
|
||||
// (function(e){
|
||||
// function A(t, e) {
|
||||
// e = 1 < arguments.length && void 0 !== e ? e : 10;
|
||||
// for (var n = function() {
|
||||
// for (var t = [], e = 0; e < 256; e++) {
|
||||
// for (var n = e, r = 0; r < 8; r++)
|
||||
// n = 1 & n ? 3988292384 ^ n >>> 1 : n >>> 1;
|
||||
// t[e] = n
|
||||
// }
|
||||
// return t
|
||||
// }(), r = function(t) {
|
||||
// t = t.replace(/\\r\\n/g, "\\n");
|
||||
// for (var e = "", n = 0; n < t.length; n++) {
|
||||
// var r = t.charCodeAt(n);
|
||||
// r < 128 ? e += String.fromCharCode(r) : e = 127 < r && r < 2048 ? (e += String.fromCharCode(r >> 6 | 192)) + String.fromCharCode(63 & r | 128) : (e = (e += String.fromCharCode(r >> 12 | 224)) + String.fromCharCode(r >> 6 & 63 | 128)) + String.fromCharCode(63 & r | 128)
|
||||
// }
|
||||
// return e
|
||||
// }(t), a = -1, i = 0; i < r.length; i++)
|
||||
// a = a >>> 8 ^ n[255 & (a ^ r.charCodeAt(i))];
|
||||
// return (a = (-1 ^ a) >>> 0).toString(e)
|
||||
// }
|
||||
//
|
||||
// function v(t) {
|
||||
// return (v = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(t) {
|
||||
// return typeof t
|
||||
// }
|
||||
// : function(t) {
|
||||
// return t && "function" == typeof Symbol && t.constructor === Symbol && t !== Symbol.prototype ? "symbol" : typeof t
|
||||
// }
|
||||
// )(t)
|
||||
// }
|
||||
//
|
||||
// for (p in a = Math.round(1e7 * Math.random()),
|
||||
// o = Math.round(((new Date).getTime() + 60 * (new Date).getTimezoneOffset() * 1e3 + 288e5) / 1e3).toString(),
|
||||
// m = ["a", "d", "e", "f", "g", "h", "l", "m", "y", "i", "j", "n", "o", "p", "k", "q", "r", "s", "t", "u", "b", "c", "v", "w", "s", "z"],
|
||||
// u = function(t, e, n) {
|
||||
// var r;
|
||||
// n = 2 < arguments.length && void 0 !== n ? n : 8;
|
||||
// return 0 === arguments.length ? null : (r = "object" === v(t) ? t : (10 === "".concat(t).length && (t = 1e3 * Number.parseInt(t)),
|
||||
// new Date(t)),
|
||||
// t += 6e4 * new Date(t).getTimezoneOffset(),
|
||||
// {
|
||||
// y: (r = new Date(t + 36e5 * n)).getFullYear(),
|
||||
// m: r.getMonth() + 1 < 10 ? "0".concat(r.getMonth() + 1) : r.getMonth() + 1,
|
||||
// d: r.getDate() < 10 ? "0".concat(r.getDate()) : r.getDate(),
|
||||
// h: r.getHours() < 10 ? "0".concat(r.getHours()) : r.getHours(),
|
||||
// f: r.getMinutes() < 10 ? "0".concat(r.getMinutes()) : r.getMinutes()
|
||||
// })
|
||||
// }(o),
|
||||
// h = u.y,
|
||||
// g = u.m,
|
||||
// l = u.d,
|
||||
// c = u.h,
|
||||
// u = u.f,
|
||||
// d = [h, g, l, c, u].join(""),
|
||||
// f = [],
|
||||
// d)
|
||||
// f.push(m[Number(d[p])]);
|
||||
// return h = A(f.join("")),
|
||||
// g = A("".concat(o, "|").concat(a, "|").concat(e, "|").concat("web", "|").concat("3", "|").concat(h)),
|
||||
// "".concat(h, "=").concat(o, "-").concat(a, "-").concat(g);
|
||||
// })(url)
|
||||
// `)
|
||||
// if err != nil {
|
||||
// fmt.Println(err)
|
||||
// return url
|
||||
// }
|
||||
// v, _ := r.Export().(string)
|
||||
// return url + "?" + v
|
||||
//}
|
||||
|
||||
func (d *Pan123) login() error {
|
||||
var body base.Json
|
||||
if utils.IsEmailFormat(d.Username) {
|
||||
body = base.Json{
|
||||
"mail": d.Username,
|
||||
"password": d.Password,
|
||||
"type": 2,
|
||||
}
|
||||
} else {
|
||||
body = base.Json{
|
||||
"passport": d.Username,
|
||||
"password": d.Password,
|
||||
"remember": true,
|
||||
}
|
||||
}
|
||||
res, err := base.RestyClient.R().
|
||||
SetHeaders(map[string]string{
|
||||
"origin": "https://www.123pan.com",
|
||||
"referer": "https://www.123pan.com/",
|
||||
"user-agent": "Dart/2.19(dart:io)-openlist",
|
||||
"platform": "web",
|
||||
"app-version": "3",
|
||||
//"user-agent": base.UserAgent,
|
||||
}).
|
||||
SetBody(body).Post(SignIn)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if utils.Json.Get(res.Body(), "code").ToInt() != 200 {
|
||||
err = fmt.Errorf(utils.Json.Get(res.Body(), "message").ToString())
|
||||
} else {
|
||||
d.AccessToken = utils.Json.Get(res.Body(), "data", "token").ToString()
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
//func authKey(reqUrl string) (*string, error) {
|
||||
// reqURL, err := url.Parse(reqUrl)
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
//
|
||||
// nowUnix := time.Now().Unix()
|
||||
// random := rand.Intn(0x989680)
|
||||
//
|
||||
// p4 := fmt.Sprintf("%d|%d|%s|%s|%s|%s", nowUnix, random, reqURL.Path, "web", "3", AuthKeySalt)
|
||||
// authKey := fmt.Sprintf("%d-%d-%x", nowUnix, random, md5.Sum([]byte(p4)))
|
||||
// return &authKey, nil
|
||||
//}
|
||||
|
||||
func (d *Pan123) Request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
isRetry := false
|
||||
do:
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeaders(map[string]string{
|
||||
"origin": "https://www.123pan.com",
|
||||
"referer": "https://www.123pan.com/",
|
||||
"authorization": "Bearer " + d.AccessToken,
|
||||
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) openlist-client",
|
||||
"platform": "web",
|
||||
"app-version": "3",
|
||||
//"user-agent": base.UserAgent,
|
||||
})
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
if resp != nil {
|
||||
req.SetResult(resp)
|
||||
}
|
||||
//authKey, err := authKey(url)
|
||||
//if err != nil {
|
||||
// return nil, err
|
||||
//}
|
||||
//req.SetQueryParam("auth-key", *authKey)
|
||||
res, err := req.Execute(method, GetApi(url))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
body := res.Body()
|
||||
code := utils.Json.Get(body, "code").ToInt()
|
||||
if code != 0 {
|
||||
if !isRetry && code == 401 {
|
||||
err := d.login()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
isRetry = true
|
||||
goto do
|
||||
}
|
||||
return nil, errors.New(jsoniter.Get(body, "message").ToString())
|
||||
}
|
||||
return body, nil
|
||||
}
|
||||
|
||||
func (d *Pan123) getFiles(ctx context.Context, parentId string, name string) ([]File, error) {
|
||||
page := 1
|
||||
total := 0
|
||||
res := make([]File, 0)
|
||||
// 2024-02-06 fix concurrency by 123pan
|
||||
for {
|
||||
if err := d.APIRateLimit(ctx, FileList); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var resp Files
|
||||
query := map[string]string{
|
||||
"driveId": "0",
|
||||
"limit": "100",
|
||||
"next": "0",
|
||||
"orderBy": "file_id",
|
||||
"orderDirection": "desc",
|
||||
"parentFileId": parentId,
|
||||
"trashed": "false",
|
||||
"SearchData": "",
|
||||
"Page": strconv.Itoa(page),
|
||||
"OnlyLookAbnormalFile": "0",
|
||||
"event": "homeListFile",
|
||||
"operateType": "4",
|
||||
"inDirectSpace": "false",
|
||||
}
|
||||
_res, err := d.Request(FileList, http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debug(string(_res))
|
||||
page++
|
||||
res = append(res, resp.Data.InfoList...)
|
||||
total = resp.Data.Total
|
||||
if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(res) != total {
|
||||
log.Warnf("incorrect file count from remote at %s: expected %d, got %d", name, total, len(res))
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
drivers/123_link/driver.go (new file)
@@ -0,0 +1,77 @@
|
||||
package _123Link
|
||||
|
||||
import (
|
||||
"context"
|
||||
stdpath "path"
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/driver"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/errs"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
)
|
||||
|
||||
type Pan123Link struct {
|
||||
model.Storage
|
||||
Addition
|
||||
root *Node
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Pan123Link) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Init(ctx context.Context) error {
|
||||
node, err := BuildTree(d.OriginURLs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
node.calSize()
|
||||
d.root = node
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Get(ctx context.Context, path string) (model.Obj, error) {
|
||||
node := GetNodeFromRootByPath(d.root, path)
|
||||
return nodeToObj(node, path)
|
||||
}
|
||||
|
||||
func (d *Pan123Link) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
node := GetNodeFromRootByPath(d.root, dir.GetPath())
|
||||
if node == nil {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
if node.isFile() {
|
||||
return nil, errs.NotFolder
|
||||
}
|
||||
return utils.SliceConvert(node.Children, func(node *Node) (model.Obj, error) {
|
||||
return nodeToObj(node, stdpath.Join(dir.GetPath(), node.Name))
|
||||
})
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
node := GetNodeFromRootByPath(d.root, file.GetPath())
|
||||
if node == nil {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
if node.isFile() {
|
||||
signUrl, err := SignURL(node.Url, d.PrivateKey, d.UID, time.Duration(d.ValidDuration)*time.Minute)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Link{
|
||||
URL: signUrl,
|
||||
}, nil
|
||||
}
|
||||
return nil, errs.NotFile
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Pan123Link)(nil)
|
||||
drivers/123_link/meta.go (new file)
@@ -0,0 +1,23 @@
package _123Link

import (
	"github.com/OpenListTeam/OpenList/v4/internal/driver"
	"github.com/OpenListTeam/OpenList/v4/internal/op"
)

type Addition struct {
	OriginURLs    string `json:"origin_urls" type:"text" required:"true" default:"https://vip.123pan.com/29/folder/file.mp3" help:"structure:FolderName:\n [FileSize:][Modified:]Url"`
	PrivateKey    string `json:"private_key"`
	UID           uint64 `json:"uid" type:"number"`
	ValidDuration int64  `json:"valid_duration" type:"number" default:"30" help:"minutes"`
}

var config = driver.Config{
	Name: "123PanLink",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Pan123Link{}
	})
}
drivers/123_link/parse.go (new file)
@@ -0,0 +1,152 @@
|
||||
package _123Link
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
url2 "net/url"
|
||||
stdpath "path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// build tree from text, text structure definition:
|
||||
/**
|
||||
* FolderName:
|
||||
* [FileSize:][Modified:]Url
|
||||
*/
|
||||
/**
|
||||
* For example:
|
||||
* folder1:
|
||||
* name1:url1
|
||||
* url2
|
||||
* folder2:
|
||||
* url3
|
||||
* url4
|
||||
* url5
|
||||
* folder3:
|
||||
* url6
|
||||
* url7
|
||||
* url8
|
||||
*/
|
||||
// if there is no name, use the last segment of the url as the name
|
||||
func BuildTree(text string) (*Node, error) {
|
||||
lines := strings.Split(text, "\n")
|
||||
var root = &Node{Level: -1, Name: "root"}
|
||||
stack := []*Node{root}
|
||||
for _, line := range lines {
|
||||
// calculate indent
|
||||
indent := 0
|
||||
for i := 0; i < len(line); i++ {
|
||||
if line[i] != ' ' {
|
||||
break
|
||||
}
|
||||
indent++
|
||||
}
|
||||
// if indent is not a multiple of 2, it is an error
|
||||
if indent%2 != 0 {
|
||||
return nil, fmt.Errorf("the indent of line '%s' is not a multiple of 2", line)
|
||||
}
|
||||
// calculate level
|
||||
level := indent / 2
|
||||
line = strings.TrimSpace(line[indent:])
|
||||
// if the line is empty, skip
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
// if level isn't greater than the level of the top of the stack
|
||||
// it is not the child of the top of the stack
|
||||
for level <= stack[len(stack)-1].Level {
|
||||
// pop the top of the stack
|
||||
stack = stack[:len(stack)-1]
|
||||
}
|
||||
// if the line is a folder
|
||||
if isFolder(line) {
|
||||
// create a new node
|
||||
node := &Node{
|
||||
Level: level,
|
||||
Name: strings.TrimSuffix(line, ":"),
|
||||
}
|
||||
// add the node to the top of the stack
|
||||
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
|
||||
// push the node to the stack
|
||||
stack = append(stack, node)
|
||||
} else {
|
||||
// if the line is a file
|
||||
// create a new node
|
||||
node, err := parseFileLine(line)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
node.Level = level
|
||||
// add the node to the top of the stack
|
||||
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
|
||||
}
|
||||
}
|
||||
return root, nil
|
||||
}
|
||||
|
||||
func isFolder(line string) bool {
|
||||
return strings.HasSuffix(line, ":")
|
||||
}
|
||||
|
||||
// line definition:
|
||||
// [FileSize:][Modified:]Url
|
||||
func parseFileLine(line string) (*Node, error) {
|
||||
// if there is no url, it is an error
|
||||
if !strings.Contains(line, "http://") && !strings.Contains(line, "https://") {
|
||||
return nil, fmt.Errorf("invalid line: %s, because url is required for file", line)
|
||||
}
|
||||
index := strings.Index(line, "http://")
|
||||
if index == -1 {
|
||||
index = strings.Index(line, "https://")
|
||||
}
|
||||
url := line[index:]
|
||||
info := line[:index]
|
||||
node := &Node{
|
||||
Url: url,
|
||||
}
|
||||
name := stdpath.Base(url)
|
||||
unescape, err := url2.PathUnescape(name)
|
||||
if err == nil {
|
||||
name = unescape
|
||||
}
|
||||
node.Name = name
|
||||
if index > 0 {
|
||||
if !strings.HasSuffix(info, ":") {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file info must end with ':'", line)
|
||||
}
|
||||
info = info[:len(info)-1]
|
||||
if info == "" {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file name can't be empty", line)
|
||||
}
|
||||
infoParts := strings.Split(info, ":")
|
||||
size, err := strconv.ParseInt(infoParts[0], 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file size must be an integer", line)
|
||||
}
|
||||
node.Size = size
|
||||
if len(infoParts) > 1 {
|
||||
modified, err := strconv.ParseInt(infoParts[1], 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file modified must be an unix timestamp", line)
|
||||
}
|
||||
node.Modified = modified
|
||||
} else {
|
||||
node.Modified = time.Now().Unix()
|
||||
}
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func splitPath(path string) []string {
|
||||
if path == "/" {
|
||||
return []string{"root"}
|
||||
}
|
||||
parts := strings.Split(path, "/")
|
||||
parts[0] = "root"
|
||||
return parts
|
||||
}
|
||||
|
||||
func GetNodeFromRootByPath(root *Node, path string) *Node {
|
||||
return root.getByPath(splitPath(path))
|
||||
}
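// Illustrative sketch, not part of the driver: BuildTree parses the
// OriginURLs text into a folder tree, calSize fills in folder sizes, and
// GetNodeFromRootByPath then resolves mount paths against the tree. The
// tree text and URLs below are made up.
func exampleBuildTree() (*Node, error) {
	text := "folder1:\n" +
		"  1024:1700000000:https://vip.123pan.com/29/folder1/a.mp3\n" +
		"  https://vip.123pan.com/29/folder1/b.mp3\n"
	root, err := BuildTree(text)
	if err != nil {
		return nil, err
	}
	root.calSize()
	// resolves to the first file node (name "a.mp3", size 1024)
	return GetNodeFromRootByPath(root, "/folder1/a.mp3"), nil
}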
|
||||
drivers/123_link/types.go (new file)
@@ -0,0 +1,66 @@
|
||||
package _123Link
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/errs"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
)
|
||||
|
||||
// Node is a node in the folder tree
|
||||
type Node struct {
|
||||
Url string
|
||||
Name string
|
||||
Level int
|
||||
Modified int64
|
||||
Size int64
|
||||
Children []*Node
|
||||
}
|
||||
|
||||
func (node *Node) getByPath(paths []string) *Node {
|
||||
if len(paths) == 0 || node == nil {
|
||||
return nil
|
||||
}
|
||||
if node.Name != paths[0] {
|
||||
return nil
|
||||
}
|
||||
if len(paths) == 1 {
|
||||
return node
|
||||
}
|
||||
for _, child := range node.Children {
|
||||
tmp := child.getByPath(paths[1:])
|
||||
if tmp != nil {
|
||||
return tmp
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (node *Node) isFile() bool {
|
||||
return node.Url != ""
|
||||
}
|
||||
|
||||
func (node *Node) calSize() int64 {
|
||||
if node.isFile() {
|
||||
return node.Size
|
||||
}
|
||||
var size int64 = 0
|
||||
for _, child := range node.Children {
|
||||
size += child.calSize()
|
||||
}
|
||||
node.Size = size
|
||||
return size
|
||||
}
|
||||
|
||||
func nodeToObj(node *Node, path string) (model.Obj, error) {
|
||||
if node == nil {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
return &model.Object{
|
||||
Name: node.Name,
|
||||
Size: node.Size,
|
||||
Modified: time.Unix(node.Modified, 0),
|
||||
IsFolder: !node.isFile(),
|
||||
Path: path,
|
||||
}, nil
|
||||
}
|
||||
drivers/123_link/util.go (new file)
@@ -0,0 +1,30 @@
package _123Link

import (
	"crypto/md5"
	"fmt"
	"math/rand"
	"net/url"
	"time"
)

func SignURL(originURL, privateKey string, uid uint64, validDuration time.Duration) (newURL string, err error) {
	if privateKey == "" {
		return originURL, nil
	}
	var (
		ts     = time.Now().Add(validDuration).Unix() // expiration timestamp
		rInt   = rand.Int()                           // random non-negative integer
		objURL *url.URL
	)
	objURL, err = url.Parse(originURL)
	if err != nil {
		return "", err
	}
	authKey := fmt.Sprintf("%d-%d-%d-%x", ts, rInt, uid, md5.Sum([]byte(fmt.Sprintf("%s-%d-%d-%d-%s",
		objURL.Path, ts, rInt, uid, privateKey))))
	v := objURL.Query()
	v.Add("auth_key", authKey)
	objURL.RawQuery = v.Encode()
	return objURL.String(), nil
}
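
// Illustrative sketch, not part of the driver: with a private key configured,
// SignURL appends an "auth_key" query parameter of the form
// "<expireTs>-<rand>-<uid>-<md5hex>" that the driver assumes the 123pan CDN
// will verify. The key and uid below are hypothetical.
func exampleSignURL() (string, error) {
	return SignURL(
		"https://vip.123pan.com/29/folder/file.mp3",
		"my-private-key", // hypothetical private key
		29,               // hypothetical uid
		30*time.Minute,
	)
	// e.g. "https://vip.123pan.com/29/folder/file.mp3?auth_key=1700001800-123456789-29-0a1b2c..."
}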
|
||||
drivers/123_open/driver.go (new file)
@@ -0,0 +1,129 @@
|
||||
package _123_open
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/driver"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/errs"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/op"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/stream"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
)
|
||||
|
||||
type Open123 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
}
|
||||
|
||||
func (d *Open123) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Open123) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Open123) Init(ctx context.Context) error {
|
||||
if d.UploadThread < 1 || d.UploadThread > 32 {
|
||||
d.UploadThread = 3
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Open123) Drop(ctx context.Context) error {
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Open123) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
fileLastId := int64(0)
|
||||
parentFileId, err := strconv.ParseInt(dir.GetID(), 10, 64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
res := make([]File, 0)
|
||||
|
||||
for fileLastId != -1 {
|
||||
files, err := d.getFiles(parentFileId, 100, fileLastId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// The trashed flag in 123pan API list requests currently has no effect, so trashed entries have to be filtered out while iterating
|
||||
for i := range files.Data.FileList {
|
||||
if files.Data.FileList[i].Trashed == 0 {
|
||||
res = append(res, files.Data.FileList[i])
|
||||
}
|
||||
}
|
||||
fileLastId = files.Data.LastFileId
|
||||
}
|
||||
return utils.SliceConvert(res, func(src File) (model.Obj, error) {
|
||||
return src, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *Open123) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
fileId, _ := strconv.ParseInt(file.GetID(), 10, 64)
|
||||
|
||||
res, err := d.getDownloadInfo(fileId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
link := model.Link{URL: res.Data.DownloadUrl}
|
||||
return &link, nil
|
||||
}
|
||||
|
||||
func (d *Open123) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
parentFileId, _ := strconv.ParseInt(parentDir.GetID(), 10, 64)
|
||||
|
||||
return d.mkdir(parentFileId, dirName)
|
||||
}
|
||||
|
||||
func (d *Open123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
toParentFileID, _ := strconv.ParseInt(dstDir.GetID(), 10, 64)
|
||||
|
||||
return d.move(srcObj.(File).FileId, toParentFileID)
|
||||
}
|
||||
|
||||
func (d *Open123) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
fileId, _ := strconv.ParseInt(srcObj.GetID(), 10, 64)
|
||||
|
||||
return d.rename(fileId, newName)
|
||||
}
|
||||
|
||||
func (d *Open123) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Open123) Remove(ctx context.Context, obj model.Obj) error {
|
||||
fileId, _ := strconv.ParseInt(obj.GetID(), 10, 64)
|
||||
|
||||
return d.trash(fileId)
|
||||
}
|
||||
|
||||
func (d *Open123) Put(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
|
||||
parentFileId, err := strconv.ParseInt(dstDir.GetID(), 10, 64)
|
||||
etag := file.GetHash().GetHash(utils.MD5)
|
||||
|
||||
if len(etag) < utils.MD5.Width {
|
||||
_, etag, err = stream.CacheFullInTempFileAndHash(file, utils.MD5)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
createResp, err := d.create(parentFileId, file.GetName(), etag, file.GetSize(), 2, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if createResp.Data.Reuse {
|
||||
return nil
|
||||
}
|
||||
up(10)
|
||||
|
||||
return d.Upload(ctx, file, createResp, up)
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Open123)(nil)
|
||||
drivers/123_open/meta.go (new file)
@@ -0,0 +1,39 @@
|
||||
package _123_open
|
||||
|
||||
import (
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/driver"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
	// AccessToken obtained via the refresh_token flow (not yet open to individual developers)
	RefreshToken string `json:"RefreshToken" required:"false"`

	// Apply for these at https://www.123pan.com/developer
	ClientID     string `json:"ClientID" required:"false"`
	ClientSecret string `json:"ClientSecret" required:"false"`

	// Set an AccessToken directly
	AccessToken string `json:"AccessToken" required:"false"`

	// An AccessToken obtained via username+password login is also compatible
	//Username string `json:"username" required:"false"`
	//Password string `json:"password" required:"false"`

	// Number of upload threads
	UploadThread int `json:"UploadThread" type:"number" default:"3" help:"the threads of upload"`
|
||||
|
||||
driver.RootID
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "123 Open",
|
||||
DefaultRoot: "0",
|
||||
LocalSort: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &Open123{}
|
||||
})
|
||||
}
|
||||
drivers/123_open/types.go (new file)
@@ -0,0 +1,205 @@
|
||||
package _123_open
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
)
|
||||
|
||||
type ApiInfo struct {
|
||||
url string
|
||||
qps int
|
||||
token chan struct{}
|
||||
}
|
||||
|
||||
func (a *ApiInfo) Require() {
|
||||
if a.qps > 0 {
|
||||
a.token <- struct{}{}
|
||||
}
|
||||
}
|
||||
func (a *ApiInfo) Release() {
|
||||
if a.qps > 0 {
|
||||
time.AfterFunc(time.Second, func() {
|
||||
<-a.token
|
||||
})
|
||||
}
|
||||
}
|
||||
func (a *ApiInfo) SetQPS(qps int) {
|
||||
a.qps = qps
|
||||
a.token = make(chan struct{}, qps)
|
||||
}
|
||||
func (a *ApiInfo) NowLen() int {
|
||||
return len(a.token)
|
||||
}
|
||||
func InitApiInfo(url string, qps int) *ApiInfo {
|
||||
return &ApiInfo{
|
||||
url: url,
|
||||
qps: qps,
|
||||
token: make(chan struct{}, qps),
|
||||
}
|
||||
}
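// Illustrative sketch, not part of the driver: ApiInfo is a small QPS gate.
// Require() takes a token before a call and Release() hands it back one
// second later, so roughly `qps` calls can start per second for that
// endpoint. The URL below is a placeholder.
func exampleQPSGate() {
	api := InitApiInfo("https://example.com/api/v1/user/info", 1) // placeholder endpoint, 1 QPS
	api.Require()       // blocks once the per-second budget is used up
	defer api.Release() // frees the slot one second later
	// ... issue the HTTP request here ...
}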
|
||||
|
||||
type File struct {
|
||||
FileName string `json:"filename"`
|
||||
Size int64 `json:"size"`
|
||||
CreateAt string `json:"createAt"`
|
||||
UpdateAt string `json:"updateAt"`
|
||||
FileId int64 `json:"fileId"`
|
||||
Type int `json:"type"`
|
||||
Etag string `json:"etag"`
|
||||
S3KeyFlag string `json:"s3KeyFlag"`
|
||||
ParentFileId int `json:"parentFileId"`
|
||||
Category int `json:"category"`
|
||||
Status int `json:"status"`
|
||||
Trashed int `json:"trashed"`
|
||||
}
|
||||
|
||||
func (f File) GetHash() utils.HashInfo {
|
||||
return utils.NewHashInfo(utils.MD5, f.Etag)
|
||||
}
|
||||
|
||||
func (f File) GetPath() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (f File) GetSize() int64 {
|
||||
return f.Size
|
||||
}
|
||||
|
||||
func (f File) GetName() string {
|
||||
return f.FileName
|
||||
}
|
||||
|
||||
func (f File) CreateTime() time.Time {
|
||||
parsedTime, err := time.Parse("2006-01-02 15:04:05", f.CreateAt)
|
||||
if err != nil {
|
||||
return time.Now()
|
||||
}
|
||||
return parsedTime
|
||||
}
|
||||
|
||||
func (f File) ModTime() time.Time {
|
||||
parsedTime, err := time.Parse("2006-01-02 15:04:05", f.UpdateAt)
|
||||
if err != nil {
|
||||
return time.Now()
|
||||
}
|
||||
return parsedTime
|
||||
}
|
||||
|
||||
func (f File) IsDir() bool {
|
||||
return f.Type == 1
|
||||
}
|
||||
|
||||
func (f File) GetID() string {
|
||||
return strconv.FormatInt(f.FileId, 10)
|
||||
}
|
||||
|
||||
var _ model.Obj = (*File)(nil)
|
||||
|
||||
type BaseResp struct {
|
||||
Code int `json:"code"`
|
||||
Message string `json:"message"`
|
||||
XTraceID string `json:"x-traceID"`
|
||||
}
|
||||
|
||||
type AccessTokenResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
AccessToken string `json:"accessToken"`
|
||||
ExpiredAt string `json:"expiredAt"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type RefreshTokenResp struct {
|
||||
AccessToken string `json:"access_token"`
|
||||
ExpiresIn int `json:"expires_in"`
|
||||
RefreshToken string `json:"refresh_token"`
|
||||
Scope string `json:"scope"`
|
||||
TokenType string `json:"token_type"`
|
||||
}
|
||||
|
||||
type UserInfoResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
UID int64 `json:"uid"`
|
||||
Username string `json:"username"`
|
||||
DisplayName string `json:"displayName"`
|
||||
HeadImage string `json:"headImage"`
|
||||
Passport string `json:"passport"`
|
||||
Mail string `json:"mail"`
|
||||
SpaceUsed int64 `json:"spaceUsed"`
|
||||
SpacePermanent int64 `json:"spacePermanent"`
|
||||
SpaceTemp int64 `json:"spaceTemp"`
|
||||
SpaceTempExpr string `json:"spaceTempExpr"`
|
||||
Vip bool `json:"vip"`
|
||||
DirectTraffic int64 `json:"directTraffic"`
|
||||
IsHideUID bool `json:"isHideUID"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type FileListResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
LastFileId int64 `json:"lastFileId"`
|
||||
FileList []File `json:"fileList"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type DownloadInfoResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
DownloadUrl string `json:"downloadUrl"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type UploadCreateResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
FileID int64 `json:"fileID"`
|
||||
PreuploadID string `json:"preuploadID"`
|
||||
Reuse bool `json:"reuse"`
|
||||
SliceSize int64 `json:"sliceSize"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type UploadUrlResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
PresignedURL string `json:"presignedURL"`
|
||||
}
|
||||
}
|
||||
|
||||
type UploadCompleteResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
Async bool `json:"async"`
|
||||
Completed bool `json:"completed"`
|
||||
FileID int64 `json:"fileID"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type UploadAsyncResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
Completed bool `json:"completed"`
|
||||
FileID int64 `json:"fileID"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type UploadResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
AccessKeyId string `json:"AccessKeyId"`
|
||||
Bucket string `json:"Bucket"`
|
||||
Key string `json:"Key"`
|
||||
SecretAccessKey string `json:"SecretAccessKey"`
|
||||
SessionToken string `json:"SessionToken"`
|
||||
FileId int64 `json:"FileId"`
|
||||
Reuse bool `json:"Reuse"`
|
||||
EndPoint string `json:"EndPoint"`
|
||||
StorageNode string `json:"StorageNode"`
|
||||
UploadId string `json:"UploadId"`
|
||||
} `json:"data"`
|
||||
}
|
||||
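The ApiInfo above is a per-endpoint concurrency cap: the buffered channel holds up to qps tokens and each request holds one while it is in flight. A minimal standalone sketch of the same pattern, with hypothetical names (not the driver's own Require/Release methods):

// Sketch only: a qps-sized buffered channel used as a concurrency gate.
package main

import (
    "fmt"
    "time"
)

type limiter struct{ token chan struct{} }

func newLimiter(qps int) *limiter { return &limiter{token: make(chan struct{}, qps)} }
func (l *limiter) acquire()       { l.token <- struct{}{} } // blocks once qps calls are in flight
func (l *limiter) release()       { <-l.token }

func main() {
    l := newLimiter(2) // at most two concurrent calls
    for i := 0; i < 4; i++ {
        go func(i int) {
            l.acquire()
            defer l.release()
            fmt.Println("request", i)
            time.Sleep(100 * time.Millisecond)
        }(i)
    }
    time.Sleep(time.Second)
}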
drivers/123_open/upload.go (new file, 151 lines)
@@ -0,0 +1,151 @@
package _123_open

import (
    "context"
    "net/http"
    "strings"
    "time"

    "github.com/OpenListTeam/OpenList/v4/drivers/base"
    "github.com/OpenListTeam/OpenList/v4/internal/driver"
    "github.com/OpenListTeam/OpenList/v4/internal/model"
    "github.com/OpenListTeam/OpenList/v4/pkg/errgroup"
    "github.com/OpenListTeam/OpenList/v4/pkg/http_range"
    "github.com/OpenListTeam/OpenList/v4/pkg/utils"
    "github.com/avast/retry-go"
    "github.com/go-resty/resty/v2"
)

func (d *Open123) create(parentFileID int64, filename string, etag string, size int64, duplicate int, containDir bool) (*UploadCreateResp, error) {
    var resp UploadCreateResp
    _, err := d.Request(UploadCreate, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "parentFileId": parentFileID,
            "filename":     filename,
            "etag":         strings.ToLower(etag),
            "size":         size,
            "duplicate":    duplicate,
            "containDir":   containDir,
        })
    }, &resp)
    if err != nil {
        return nil, err
    }
    return &resp, nil
}

func (d *Open123) url(preuploadID string, sliceNo int64) (string, error) {
    // get upload url
    var resp UploadUrlResp
    _, err := d.Request(UploadUrl, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "preuploadId": preuploadID,
            "sliceNo":     sliceNo,
        })
    }, &resp)
    if err != nil {
        return "", err
    }
    return resp.Data.PresignedURL, nil
}

func (d *Open123) complete(preuploadID string) (*UploadCompleteResp, error) {
    var resp UploadCompleteResp
    _, err := d.Request(UploadComplete, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "preuploadID": preuploadID,
        })
    }, &resp)
    if err != nil {
        return nil, err
    }
    return &resp, nil
}

func (d *Open123) async(preuploadID string) (*UploadAsyncResp, error) {
    var resp UploadAsyncResp
    _, err := d.Request(UploadAsync, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "preuploadID": preuploadID,
        })
    }, &resp)
    if err != nil {
        return nil, err
    }
    return &resp, nil
}

func (d *Open123) Upload(ctx context.Context, file model.FileStreamer, createResp *UploadCreateResp, up driver.UpdateProgress) error {
    size := file.GetSize()
    chunkSize := createResp.Data.SliceSize
    uploadNums := (size + chunkSize - 1) / chunkSize
    threadG, uploadCtx := errgroup.NewGroupWithContext(ctx, d.UploadThread,
        retry.Attempts(3),
        retry.Delay(time.Second),
        retry.DelayType(retry.BackOffDelay))

    for partIndex := int64(0); partIndex < uploadNums; partIndex++ {
        if utils.IsCanceled(uploadCtx) {
            return ctx.Err()
        }
        partIndex := partIndex
        partNumber := partIndex + 1 // slice numbers start at 1
        offset := partIndex * chunkSize
        size := min(chunkSize, size-offset)
        limitedReader, err := file.RangeRead(http_range.Range{
            Start:  offset,
            Length: size})
        if err != nil {
            return err
        }
        limitedReader = driver.NewLimitedUploadStream(ctx, limitedReader)

        threadG.Go(func(ctx context.Context) error {
            uploadPartUrl, err := d.url(createResp.Data.PreuploadID, partNumber)
            if err != nil {
                return err
            }

            req, err := http.NewRequestWithContext(ctx, "PUT", uploadPartUrl, limitedReader)
            if err != nil {
                return err
            }
            req = req.WithContext(ctx)
            req.ContentLength = size

            res, err := base.HttpClient.Do(req)
            if err != nil {
                return err
            }
            _ = res.Body.Close()

            progress := 10.0 + 85.0*float64(threadG.Success())/float64(uploadNums)
            up(progress)
            return nil
        })
    }

    if err := threadG.Wait(); err != nil {
        return err
    }

    uploadCompleteResp, err := d.complete(createResp.Data.PreuploadID)
    if err != nil {
        return err
    }
    if !uploadCompleteResp.Data.Async || uploadCompleteResp.Data.Completed {
        return nil
    }

    for {
        uploadAsyncResp, err := d.async(createResp.Data.PreuploadID)
        if err != nil {
            return err
        }
        if uploadAsyncResp.Data.Completed {
            break
        }
        // avoid busy-polling the async-result endpoint
        time.Sleep(time.Second)
    }
    up(100)
    return nil
}
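For reference, the slice math in Upload is plain ceiling division followed by a min() for the final slice. A tiny standalone sketch with example numbers (not values returned by the API):

package main

import "fmt"

func main() {
    size := int64(100 << 20)     // 100 MiB file (example)
    chunkSize := int64(16 << 20) // sliceSize from the create call (example)
    uploadNums := (size + chunkSize - 1) / chunkSize
    lastLen := size - (uploadNums-1)*chunkSize
    fmt.Println(uploadNums, lastLen>>20) // 7 slices, last slice 4 MiB
}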
drivers/123_open/util.go (new file, 217 lines)
@@ -0,0 +1,217 @@
package _123_open

import (
    "encoding/json"
    "errors"
    "net/http"
    "strconv"
    "time"

    "github.com/OpenListTeam/OpenList/v4/drivers/base"
    "github.com/OpenListTeam/OpenList/v4/internal/op"
    "github.com/go-resty/resty/v2"
    log "github.com/sirupsen/logrus"
)

var ( // QPS limits differ depending on how the AccessToken was obtained; keeping one ApiInfo per endpoint makes this easy to extend
    Api = "https://open-api.123pan.com"

    AccessToken    = InitApiInfo(Api+"/api/v1/access_token", 1)
    RefreshToken   = InitApiInfo(Api+"/api/v1/oauth2/access_token", 1)
    UserInfo       = InitApiInfo(Api+"/api/v1/user/info", 1)
    FileList       = InitApiInfo(Api+"/api/v2/file/list", 4)
    DownloadInfo   = InitApiInfo(Api+"/api/v1/file/download_info", 0)
    Mkdir          = InitApiInfo(Api+"/upload/v1/file/mkdir", 2)
    Move           = InitApiInfo(Api+"/api/v1/file/move", 1)
    Rename         = InitApiInfo(Api+"/api/v1/file/name", 1)
    Trash          = InitApiInfo(Api+"/api/v1/file/trash", 2)
    UploadCreate   = InitApiInfo(Api+"/upload/v1/file/create", 2)
    UploadUrl      = InitApiInfo(Api+"/upload/v1/file/get_upload_url", 0)
    UploadComplete = InitApiInfo(Api+"/upload/v1/file/upload_complete", 0)
    UploadAsync    = InitApiInfo(Api+"/upload/v1/file/upload_async_result", 1)
)

func (d *Open123) Request(apiInfo *ApiInfo, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    retryToken := true
    for {
        req := base.RestyClient.R()
        req.SetHeaders(map[string]string{
            "authorization": "Bearer " + d.AccessToken,
            "platform":      "open_platform",
            "Content-Type":  "application/json",
        })

        if callback != nil {
            callback(req)
        }
        if resp != nil {
            req.SetResult(resp)
        }

        log.Debugf("API: %s, QPS: %d, NowLen: %d", apiInfo.url, apiInfo.qps, apiInfo.NowLen())

        apiInfo.Require()
        defer apiInfo.Release()
        res, err := req.Execute(method, apiInfo.url)
        if err != nil {
            return nil, err
        }
        body := res.Body()

        // parse into the common response envelope
        var baseResp BaseResp
        if err = json.Unmarshal(body, &baseResp); err != nil {
            return nil, err
        }

        if baseResp.Code == 0 {
            return body, nil
        } else if baseResp.Code == 401 && retryToken {
            retryToken = false
            if err := d.flushAccessToken(); err != nil {
                return nil, err
            }
        } else if baseResp.Code == 429 {
            time.Sleep(500 * time.Millisecond)
            log.Warningf("API: %s, QPS: %d, requests are too frequent; the API reports rate limiting, consider lowering the QPS", apiInfo.url, apiInfo.qps)
        } else {
            return nil, errors.New(baseResp.Message)
        }
    }

}

func (d *Open123) flushAccessToken() error {
    if d.Addition.ClientID != "" {
        if d.Addition.ClientSecret != "" {
            var resp AccessTokenResp
            _, err := d.Request(AccessToken, http.MethodPost, func(req *resty.Request) {
                req.SetBody(base.Json{
                    "clientID":     d.ClientID,
                    "clientSecret": d.ClientSecret,
                })
            }, &resp)
            if err != nil {
                return err
            }
            d.AccessToken = resp.Data.AccessToken
            op.MustSaveDriverStorage(d)
        } else if d.Addition.RefreshToken != "" {
            var resp RefreshTokenResp
            _, err := d.Request(RefreshToken, http.MethodPost, func(req *resty.Request) {
                req.SetQueryParam("client_id", d.ClientID)
                req.SetQueryParam("grant_type", "refresh_token")
                req.SetQueryParam("refresh_token", d.Addition.RefreshToken)
            }, &resp)
            if err != nil {
                return err
            }
            d.AccessToken = resp.AccessToken
            d.RefreshToken = resp.RefreshToken
            op.MustSaveDriverStorage(d)
        }
    }
    return nil
}

func (d *Open123) getUserInfo() (*UserInfoResp, error) {
    var resp UserInfoResp

    if _, err := d.Request(UserInfo, http.MethodGet, nil, &resp); err != nil {
        return nil, err
    }

    return &resp, nil
}

func (d *Open123) getFiles(parentFileId int64, limit int, lastFileId int64) (*FileListResp, error) {
    var resp FileListResp

    _, err := d.Request(FileList, http.MethodGet, func(req *resty.Request) {
        req.SetQueryParams(
            map[string]string{
                "parentFileId": strconv.FormatInt(parentFileId, 10),
                "limit":        strconv.Itoa(limit),
                "lastFileId":   strconv.FormatInt(lastFileId, 10),
                "trashed":      "false",
                "searchMode":   "",
                "searchData":   "",
            })
    }, &resp)

    if err != nil {
        return nil, err
    }

    return &resp, nil
}

func (d *Open123) getDownloadInfo(fileId int64) (*DownloadInfoResp, error) {
    var resp DownloadInfoResp

    _, err := d.Request(DownloadInfo, http.MethodGet, func(req *resty.Request) {
        req.SetQueryParams(map[string]string{
            "fileId": strconv.FormatInt(fileId, 10),
        })
    }, &resp)
    if err != nil {
        return nil, err
    }

    return &resp, nil
}

func (d *Open123) mkdir(parentID int64, name string) error {
    _, err := d.Request(Mkdir, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "parentID": strconv.FormatInt(parentID, 10),
            "name":     name,
        })
    }, nil)
    if err != nil {
        return err
    }

    return nil
}

func (d *Open123) move(fileID, toParentFileID int64) error {
    _, err := d.Request(Move, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "fileIDs":        []int64{fileID},
            "toParentFileID": toParentFileID,
        })
    }, nil)
    if err != nil {
        return err
    }

    return nil
}

func (d *Open123) rename(fileId int64, fileName string) error {
    _, err := d.Request(Rename, http.MethodPut, func(req *resty.Request) {
        req.SetBody(base.Json{
            "fileId":   fileId,
            "fileName": fileName,
        })
    }, nil)
    if err != nil {
        return err
    }

    return nil
}

func (d *Open123) trash(fileId int64) error {
    _, err := d.Request(Trash, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "fileIDs": []int64{fileId},
        })
    }, nil)
    if err != nil {
        return err
    }

    return nil
}
drivers/123_share/driver.go (new file, 174 lines)
@@ -0,0 +1,174 @@
package _123Share

import (
    "context"
    "encoding/base64"
    "fmt"
    "net/http"
    "net/url"
    "sync"
    "time"

    "golang.org/x/time/rate"

    _123 "github.com/OpenListTeam/OpenList/v4/drivers/123"
    "github.com/OpenListTeam/OpenList/v4/drivers/base"
    "github.com/OpenListTeam/OpenList/v4/internal/driver"
    "github.com/OpenListTeam/OpenList/v4/internal/errs"
    "github.com/OpenListTeam/OpenList/v4/internal/model"
    "github.com/OpenListTeam/OpenList/v4/pkg/utils"
    "github.com/go-resty/resty/v2"
    log "github.com/sirupsen/logrus"
)

type Pan123Share struct {
    model.Storage
    Addition
    apiRateLimit sync.Map
    ref          *_123.Pan123
}

func (d *Pan123Share) Config() driver.Config {
    return config
}

func (d *Pan123Share) GetAddition() driver.Additional {
    return &d.Addition
}

func (d *Pan123Share) Init(ctx context.Context) error {
    // TODO login / refresh token
    //op.MustSaveDriverStorage(d)
    return nil
}

func (d *Pan123Share) InitReference(storage driver.Driver) error {
    refStorage, ok := storage.(*_123.Pan123)
    if ok {
        d.ref = refStorage
        return nil
    }
    return fmt.Errorf("ref: storage is not 123Pan")
}

func (d *Pan123Share) Drop(ctx context.Context) error {
    d.ref = nil
    return nil
}

func (d *Pan123Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    // TODO return the files list, required
    files, err := d.getFiles(ctx, dir.GetID())
    if err != nil {
        return nil, err
    }
    return utils.SliceConvert(files, func(src File) (model.Obj, error) {
        return src, nil
    })
}

func (d *Pan123Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    // TODO return link of file, required
    if f, ok := file.(File); ok {
        //var resp DownResp
        var headers map[string]string
        if !utils.IsLocalIPAddr(args.IP) {
            headers = map[string]string{
                //"X-Real-IP": "1.1.1.1",
                "X-Forwarded-For": args.IP,
            }
        }
        data := base.Json{
            "shareKey":  d.ShareKey,
            "SharePwd":  d.SharePwd,
            "etag":      f.Etag,
            "fileId":    f.FileId,
            "s3keyFlag": f.S3KeyFlag,
            "size":      f.Size,
        }
        resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
            req.SetBody(data).SetHeaders(headers)
        }, nil)
        if err != nil {
            return nil, err
        }
        downloadUrl := utils.Json.Get(resp, "data", "DownloadURL").ToString()
        u, err := url.Parse(downloadUrl)
        if err != nil {
            return nil, err
        }
        nu := u.Query().Get("params")
        if nu != "" {
            du, _ := base64.StdEncoding.DecodeString(nu)
            u, err = url.Parse(string(du))
            if err != nil {
                return nil, err
            }
        }
        u_ := u.String()
        log.Debug("download url: ", u_)
        res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
        if err != nil {
            return nil, err
        }
        log.Debug(res.String())
        link := model.Link{
            URL: u_,
        }
        log.Debugln("res code: ", res.StatusCode())
        if res.StatusCode() == 302 {
            link.URL = res.Header().Get("location")
        } else if res.StatusCode() < 300 {
            link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
        }
        link.Header = http.Header{
            "Referer": []string{"https://www.123pan.com/"},
        }
        return &link, nil
    }
    return nil, fmt.Errorf("can't convert obj")
}

func (d *Pan123Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
    // TODO create folder, optional
    return errs.NotSupport
}

func (d *Pan123Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
    // TODO move obj, optional
    return errs.NotSupport
}

func (d *Pan123Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
    // TODO rename obj, optional
    return errs.NotSupport
}

func (d *Pan123Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
    // TODO copy obj, optional
    return errs.NotSupport
}

func (d *Pan123Share) Remove(ctx context.Context, obj model.Obj) error {
    // TODO remove obj, optional
    return errs.NotSupport
}

func (d *Pan123Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    // TODO upload file, optional
    return errs.NotSupport
}

//func (d *Pan123Share) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
//	return nil, errs.NotSupport
//}

func (d *Pan123Share) APIRateLimit(ctx context.Context, api string) error {
    value, _ := d.apiRateLimit.LoadOrStore(api,
        rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
    limiter := value.(*rate.Limiter)

    return limiter.Wait(ctx)
}

var _ driver.Driver = (*Pan123Share)(nil)
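APIRateLimit above lazily keeps one golang.org/x/time/rate limiter per endpoint, allowing one call every 700ms. A minimal standalone sketch of the same pattern, with hypothetical names:

package main

import (
    "context"
    "fmt"
    "sync"
    "time"

    "golang.org/x/time/rate"
)

var limiters sync.Map // api path -> *rate.Limiter

// waitForAPI blocks until the per-endpoint limiter grants a slot.
func waitForAPI(ctx context.Context, api string) error {
    v, _ := limiters.LoadOrStore(api, rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
    return v.(*rate.Limiter).Wait(ctx)
}

func main() {
    ctx := context.Background()
    for i := 0; i < 3; i++ {
        if err := waitForAPI(ctx, "/share/get"); err != nil {
            panic(err)
        }
        fmt.Println("request", i, time.Now().Format("15:04:05.000"))
    }
}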
drivers/123_share/meta.go (new file, 35 lines)
@@ -0,0 +1,35 @@
package _123Share

import (
    "github.com/OpenListTeam/OpenList/v4/internal/driver"
    "github.com/OpenListTeam/OpenList/v4/internal/op"
)

type Addition struct {
    ShareKey string `json:"sharekey" required:"true"`
    SharePwd string `json:"sharepassword"`
    driver.RootID
    //OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
    //OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
    AccessToken string `json:"accesstoken" type:"text"`
}

var config = driver.Config{
    Name:              "123PanShare",
    LocalSort:         true,
    OnlyLocal:         false,
    OnlyProxy:         false,
    NoCache:           false,
    NoUpload:          true,
    NeedMs:            false,
    DefaultRoot:       "0",
    CheckStatus:       false,
    Alert:             "",
    NoOverwriteUpload: false,
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        return &Pan123Share{}
    })
}
drivers/123_share/types.go (new file, 100 lines)
@@ -0,0 +1,100 @@
package _123Share

import (
    "net/url"
    "path"
    "strconv"
    "strings"
    "time"

    "github.com/OpenListTeam/OpenList/v4/pkg/utils"

    "github.com/OpenListTeam/OpenList/v4/internal/model"
)

type File struct {
    FileName    string    `json:"FileName"`
    Size        int64     `json:"Size"`
    UpdateAt    time.Time `json:"UpdateAt"`
    FileId      int64     `json:"FileId"`
    Type        int       `json:"Type"`
    Etag        string    `json:"Etag"`
    S3KeyFlag   string    `json:"S3KeyFlag"`
    DownloadUrl string    `json:"DownloadUrl"`
}

func (f File) GetHash() utils.HashInfo {
    return utils.HashInfo{}
}

func (f File) GetPath() string {
    return ""
}

func (f File) GetSize() int64 {
    return f.Size
}

func (f File) GetName() string {
    return f.FileName
}

func (f File) ModTime() time.Time {
    return f.UpdateAt
}
func (f File) CreateTime() time.Time {
    return f.UpdateAt
}

func (f File) IsDir() bool {
    return f.Type == 1
}

func (f File) GetID() string {
    return strconv.FormatInt(f.FileId, 10)
}

func (f File) Thumb() string {
    if f.DownloadUrl == "" {
        return ""
    }
    du, err := url.Parse(f.DownloadUrl)
    if err != nil {
        return ""
    }
    du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
    query := du.Query()
    query.Set("w", "70")
    query.Set("h", "70")
    if !query.Has("type") {
        query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
    }
    if !query.Has("trade_key") {
        query.Set("trade_key", "123pan-thumbnail")
    }
    du.RawQuery = query.Encode()
    return du.String()
}

var _ model.Obj = (*File)(nil)
var _ model.Thumb = (*File)(nil)

//func (f File) Thumb() string {
//
//}
//var _ model.Thumb = (*File)(nil)

type Files struct {
    //BaseResp
    Data struct {
        InfoList []File `json:"InfoList"`
        Next     string `json:"Next"`
    } `json:"data"`
}

//type DownResp struct {
//	//BaseResp
//	Data struct {
//		DownloadUrl string `json:"DownloadUrl"`
//	} `json:"data"`
//}
drivers/123_share/util.go (new file, 120 lines)
@@ -0,0 +1,120 @@
package _123Share

import (
    "context"
    "errors"
    "fmt"
    "hash/crc32"
    "math"
    "math/rand"
    "net/http"
    "net/url"
    "strconv"
    "strings"
    "time"

    "github.com/OpenListTeam/OpenList/v4/drivers/base"
    "github.com/OpenListTeam/OpenList/v4/pkg/utils"
    "github.com/go-resty/resty/v2"
    jsoniter "github.com/json-iterator/go"
)

const (
    Api          = "https://www.123pan.com/api"
    AApi         = "https://www.123pan.com/a/api"
    BApi         = "https://www.123pan.com/b/api"
    MainApi      = BApi
    FileList     = MainApi + "/share/get"
    DownloadInfo = MainApi + "/share/download/info"
    //AuthKeySalt  = "8-8D$sL8gPjom7bk#cY"
)

func signPath(path string, os string, version string) (k string, v string) {
    table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
    random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
    now := time.Now().In(time.FixedZone("CST", 8*3600))
    timestamp := fmt.Sprint(now.Unix())
    nowStr := []byte(now.Format("200601021504"))
    for i := 0; i < len(nowStr); i++ {
        nowStr[i] = table[nowStr[i]-48]
    }
    timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
    data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
    dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
    return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
}

func GetApi(rawUrl string) string {
    u, _ := url.Parse(rawUrl)
    query := u.Query()
    query.Add(signPath(u.Path, "web", "3"))
    u.RawQuery = query.Encode()
    return u.String()
}

func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    if d.ref != nil {
        return d.ref.Request(url, method, callback, resp)
    }
    req := base.RestyClient.R()
    req.SetHeaders(map[string]string{
        "origin":        "https://www.123pan.com",
        "referer":       "https://www.123pan.com/",
        "authorization": "Bearer " + d.AccessToken,
        "user-agent":    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) openlist-client",
        "platform":      "web",
        "app-version":   "3",
        //"user-agent":  base.UserAgent,
    })
    if callback != nil {
        callback(req)
    }
    if resp != nil {
        req.SetResult(resp)
    }
    res, err := req.Execute(method, GetApi(url))
    if err != nil {
        return nil, err
    }
    body := res.Body()
    code := utils.Json.Get(body, "code").ToInt()
    if code != 0 {
        return nil, errors.New(jsoniter.Get(body, "message").ToString())
    }
    return body, nil
}

func (d *Pan123Share) getFiles(ctx context.Context, parentId string) ([]File, error) {
    page := 1
    res := make([]File, 0)
    for {
        if err := d.APIRateLimit(ctx, FileList); err != nil {
            return nil, err
        }
        var resp Files
        query := map[string]string{
            "limit":          "100",
            "next":           "0",
            "orderBy":        "file_id",
            "orderDirection": "desc",
            "parentFileId":   parentId,
            "Page":           strconv.Itoa(page),
            "shareKey":       d.ShareKey,
            "SharePwd":       d.SharePwd,
        }
        _, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
            req.SetQueryParams(query)
        }, &resp)
        if err != nil {
            return nil, err
        }
        page++
        res = append(res, resp.Data.InfoList...)
        if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
            break
        }
    }
    return res, nil
}

// do others that are not defined in the Driver interface
drivers/139/driver.go (new file, 873 lines)
@@ -0,0 +1,873 @@
|
||||
package _139
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/OpenListTeam/OpenList/v4/drivers/base"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/driver"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/errs"
|
||||
"github.com/OpenListTeam/OpenList/v4/internal/model"
|
||||
streamPkg "github.com/OpenListTeam/OpenList/v4/internal/stream"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/cron"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils"
|
||||
"github.com/OpenListTeam/OpenList/v4/pkg/utils/random"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type Yun139 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
cron *cron.Cron
|
||||
Account string
|
||||
ref *Yun139
|
||||
PersonalCloudHost string
|
||||
}
|
||||
|
||||
func (d *Yun139) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Yun139) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Yun139) Init(ctx context.Context) error {
|
||||
if d.ref == nil {
|
||||
if len(d.Authorization) == 0 {
|
||||
return fmt.Errorf("authorization is empty")
|
||||
}
|
||||
err := d.refreshToken()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Query Route Policy
|
||||
var resp QueryRoutePolicyResp
|
||||
_, err = d.requestRoute(base.Json{
|
||||
"userInfo": base.Json{
|
||||
"userType": 1,
|
||||
"accountType": 1,
|
||||
"accountName": d.Account},
|
||||
"modAddrType": 1,
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, policyItem := range resp.Data.RoutePolicyList {
|
||||
if policyItem.ModName == "personal" {
|
||||
d.PersonalCloudHost = policyItem.HttpsUrl
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(d.PersonalCloudHost) == 0 {
|
||||
return fmt.Errorf("PersonalCloudHost is empty")
|
||||
}
|
||||
|
||||
d.cron = cron.NewCron(time.Hour * 12)
|
||||
d.cron.Do(func() {
|
||||
err := d.refreshToken()
|
||||
if err != nil {
|
||||
log.Errorf("%+v", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = "/"
|
||||
}
|
||||
case MetaPersonal:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = "root"
|
||||
}
|
||||
case MetaGroup:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = d.CloudID
|
||||
}
|
||||
case MetaFamily:
|
||||
default:
|
||||
return errs.NotImplement
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Yun139) InitReference(storage driver.Driver) error {
|
||||
refStorage, ok := storage.(*Yun139)
|
||||
if ok {
|
||||
d.ref = refStorage
|
||||
return nil
|
||||
}
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Yun139) Drop(ctx context.Context) error {
|
||||
if d.cron != nil {
|
||||
d.cron.Stop()
|
||||
}
|
||||
d.ref = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
return d.personalGetFiles(dir.GetID())
|
||||
case MetaPersonal:
|
||||
return d.getFiles(dir.GetID())
|
||||
case MetaFamily:
|
||||
return d.familyGetFiles(dir.GetID())
|
||||
case MetaGroup:
|
||||
return d.groupGetFiles(dir.GetID())
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
var url string
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
url, err = d.personalGetLink(file.GetID())
|
||||
case MetaPersonal:
|
||||
url, err = d.getLink(file.GetID())
|
||||
case MetaFamily:
|
||||
url, err = d.familyGetLink(file.GetID(), file.GetPath())
|
||||
case MetaGroup:
|
||||
url, err = d.groupGetLink(file.GetID(), file.GetPath())
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Link{URL: url}, nil
|
||||
}
|
||||
|
||||
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"parentFileId": parentDir.GetID(),
|
||||
"name": dirName,
|
||||
"description": "",
|
||||
"type": "folder",
|
||||
"fileRenameMode": "force_rename",
|
||||
}
|
||||
pathname := "/file/create"
|
||||
_, err = d.personalPost(pathname, data, nil)
|
||||
case MetaPersonal:
|
||||
data := base.Json{
|
||||
"createCatalogExtReq": base.Json{
|
||||
"parentCatalogID": parentDir.GetID(),
|
||||
"newCatalogName": dirName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaFamily:
|
||||
data := base.Json{
|
||||
"cloudID": d.CloudID,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"docLibName": dirName,
|
||||
"path": path.Join(parentDir.GetPath(), parentDir.GetID()),
|
||||
}
|
||||
pathname := "/orchestration/familyCloud-rebuild/cloudCatalog/v1.0/createCloudDoc"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaGroup:
|
||||
data := base.Json{
|
||||
"catalogName": dirName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"groupID": d.CloudID,
|
||||
"parentFileId": parentDir.GetID(),
|
||||
"path": path.Join(parentDir.GetPath(), parentDir.GetID()),
|
||||
}
|
||||
pathname := "/orchestration/group-rebuild/catalog/v1.0/createGroupCatalog"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileIds": []string{srcObj.GetID()},
|
||||
"toParentFileId": dstDir.GetID(),
|
||||
}
|
||||
pathname := "/file/batchMove"
|
||||
_, err := d.personalPost(pathname, data, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
case MetaGroup:
|
||||
var contentList []string
|
||||
var catalogList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogList = append(catalogList, srcObj.GetID())
|
||||
} else {
|
||||
contentList = append(contentList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"taskType": 3,
|
||||
"srcType": 2,
|
||||
"srcGroupID": d.CloudID,
|
||||
"destType": 2,
|
||||
"destGroupID": d.CloudID,
|
||||
"destPath": dstDir.GetPath(),
|
||||
"contentList": contentList,
|
||||
"catalogList": catalogList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/group-rebuild/task/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
case MetaPersonal:
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 3,
|
||||
"actionType": "304",
|
||||
"taskInfo": base.Json{
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileId": srcObj.GetID(),
|
||||
"name": newName,
|
||||
"description": "",
|
||||
}
|
||||
pathname := "/file/update"
|
||||
_, err = d.personalPost(pathname, data, nil)
|
||||
case MetaPersonal:
|
||||
var data base.Json
|
||||
var pathname string
|
||||
if srcObj.IsDir() {
|
||||
data = base.Json{
|
||||
"catalogID": srcObj.GetID(),
|
||||
"catalogName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
|
||||
} else {
|
||||
data = base.Json{
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
|
||||
}
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaGroup:
|
||||
var data base.Json
|
||||
var pathname string
|
||||
if srcObj.IsDir() {
|
||||
data = base.Json{
|
||||
"groupID": d.CloudID,
|
||||
"modifyCatalogID": srcObj.GetID(),
|
||||
"modifyCatalogName": newName,
|
||||
"path": srcObj.GetPath(),
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/group-rebuild/catalog/v1.0/modifyGroupCatalog"
|
||||
} else {
|
||||
data = base.Json{
|
||||
"groupID": d.CloudID,
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"path": srcObj.GetPath(),
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/group-rebuild/content/v1.0/modifyGroupContent"
|
||||
}
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaFamily:
|
||||
var data base.Json
|
||||
var pathname string
|
||||
if srcObj.IsDir() {
|
||||
// The web API does not support renaming family-cloud folders
|
||||
// data = base.Json{
|
||||
// "catalogType": 3,
|
||||
// "catalogID": srcObj.GetID(),
|
||||
// "catalogName": newName,
|
||||
// "commonAccountInfo": base.Json{
|
||||
// "account": d.getAccount(),
|
||||
// "accountType": 1,
|
||||
// },
|
||||
// "path": srcObj.GetPath(),
|
||||
// }
|
||||
// pathname = "/orchestration/familyCloud-rebuild/photoContent/v1.0/modifyCatalogInfo"
|
||||
return errs.NotImplement
|
||||
} else {
|
||||
data = base.Json{
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"path": srcObj.GetPath(),
|
||||
}
|
||||
pathname = "/orchestration/familyCloud-rebuild/photoContent/v1.0/modifyContentInfo"
|
||||
}
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileIds": []string{srcObj.GetID()},
|
||||
"toParentFileId": dstDir.GetID(),
|
||||
}
|
||||
pathname := "/file/batchCopy"
|
||||
_, err := d.personalPost(pathname, data, nil)
|
||||
return err
|
||||
case MetaPersonal:
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 3,
|
||||
"actionType": 309,
|
||||
"taskInfo": base.Json{
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileIds": []string{obj.GetID()},
|
||||
}
|
||||
pathname := "/recyclebin/batchTrash"
|
||||
_, err := d.personalPost(pathname, data, nil)
|
||||
return err
|
||||
case MetaGroup:
|
||||
var contentList []string
|
||||
var catalogList []string
|
||||
// Deletion requires the full path
|
||||
if obj.IsDir() {
|
||||
catalogList = append(catalogList, obj.GetPath())
|
||||
} else {
|
||||
contentList = append(contentList, path.Join(obj.GetPath(), obj.GetID()))
|
||||
}
|
||||
data := base.Json{
|
||||
"taskType": 2,
|
||||
"srcGroupID": d.CloudID,
|
||||
"contentList": contentList,
|
||||
"catalogList": catalogList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/group-rebuild/task/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
case MetaPersonal:
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if obj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, obj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, obj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 2,
|
||||
"actionType": 201,
|
||||
"taskInfo": base.Json{
|
||||
"newCatalogID": "",
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
if d.isFamily() {
|
||||
data = base.Json{
|
||||
"catalogList": catalogInfoList,
|
||||
"contentList": contentInfoList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"sourceCloudID": d.CloudID,
|
||||
"sourceCatalogType": 1002,
|
||||
"taskType": 2,
|
||||
"path": obj.GetPath(),
|
||||
}
|
||||
pathname = "/orchestration/familyCloud-rebuild/batchOprTask/v1.0/createBatchOprTask"
|
||||
}
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
default:
|
||||
return errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) getPartSize(size int64) int64 {
|
||||
if d.CustomUploadPartSize != 0 {
|
||||
return d.CustomUploadPartSize
|
||||
}
|
||||
// The cloud drive caps the number of upload parts
|
||||
if size/utils.GB > 30 {
|
||||
return 512 * utils.MB
|
||||
}
|
||||
return 100 * utils.MB
|
||||
}
|
||||
|
||||
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
var err error
|
||||
fullHash := stream.GetHash().GetHash(utils.SHA256)
|
||||
if len(fullHash) != utils.SHA256.Width {
|
||||
_, fullHash, err = streamPkg.CacheFullInTempFileAndHash(stream, utils.SHA256)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
size := stream.GetSize()
|
||||
var partSize = d.getPartSize(size)
|
||||
part := size / partSize
|
||||
if size%partSize > 0 {
|
||||
part++
|
||||
} else if part == 0 {
|
||||
part = 1
|
||||
}
|
||||
partInfos := make([]PartInfo, 0, part)
|
||||
for i := int64(0); i < part; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
start := i * partSize
|
||||
byteSize := size - start
|
||||
if byteSize > partSize {
|
||||
byteSize = partSize
|
||||
}
|
||||
partNumber := i + 1
|
||||
partInfo := PartInfo{
|
||||
PartNumber: partNumber,
|
||||
PartSize: byteSize,
|
||||
ParallelHashCtx: ParallelHashCtx{
|
||||
PartOffset: start,
|
||||
},
|
||||
}
|
||||
partInfos = append(partInfos, partInfo)
|
||||
}
|
||||
|
||||
// Take only the first 100 partInfos
|
||||
firstPartInfos := partInfos
|
||||
if len(firstPartInfos) > 100 {
|
||||
firstPartInfos = firstPartInfos[:100]
|
||||
}
|
||||
|
||||
// Create the upload task and fetch the upload info plus upload URLs for the first 100 parts
|
||||
data := base.Json{
|
||||
"contentHash": fullHash,
|
||||
"contentHashAlgorithm": "SHA256",
|
||||
"contentType": "application/octet-stream",
|
||||
"parallelUpload": false,
|
||||
"partInfos": firstPartInfos,
|
||||
"size": size,
|
||||
"parentFileId": dstDir.GetID(),
|
||||
"name": stream.GetName(),
|
||||
"type": "file",
|
||||
"fileRenameMode": "auto_rename",
|
||||
}
|
||||
pathname := "/file/create"
|
||||
var resp PersonalUploadResp
|
||||
_, err = d.personalPost(pathname, data, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Check whether the file already exists
|
||||
// resp.Data.Exist == true: a file with the same name and checksum already exists; the cloud will not add a duplicate, so no manual conflict handling is needed
|
||||
if resp.Data.Exist {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Check whether rapid upload applies to this file
|
||||
// resp.Data.RapidUpload == true means rapid upload is supported, but here we simply check whether part upload URLs were returned
|
||||
// Conflicts still have to be handled manually in the rapid-upload case
|
||||
if resp.Data.PartInfos != nil {
|
||||
// Read the upload URLs for the first 100 parts
|
||||
uploadPartInfos := resp.Data.PartInfos
|
||||
|
||||
// Fetch upload URLs for the remaining parts
|
||||
for i := 101; i < len(partInfos); i += 100 {
|
||||
end := i + 100
|
||||
if end > len(partInfos) {
|
||||
end = len(partInfos)
|
||||
}
|
||||
batchPartInfos := partInfos[i:end]
|
||||
|
||||
moredata := base.Json{
|
||||
"fileId": resp.Data.FileId,
|
||||
"uploadId": resp.Data.UploadId,
|
||||
"partInfos": batchPartInfos,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/file/getUploadUrl"
|
||||
var moreresp PersonalUploadUrlResp
|
||||
_, err = d.personalPost(pathname, moredata, &moreresp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uploadPartInfos = append(uploadPartInfos, moreresp.Data.PartInfos...)
|
||||
}
|
||||
|
||||
// Progress
|
||||
p := driver.NewProgress(size, up)
|
||||
|
||||
rateLimited := driver.NewLimitedUploadStream(ctx, stream)
|
||||
// Upload all parts
|
||||
for _, uploadPartInfo := range uploadPartInfos {
|
||||
index := uploadPartInfo.PartNumber - 1
|
||||
partSize := partInfos[index].PartSize
|
||||
log.Debugf("[139] uploading part %+v/%+v", index, len(uploadPartInfos))
|
||||
limitReader := io.LimitReader(rateLimited, partSize)
|
||||
|
||||
// Update Progress
|
||||
r := io.TeeReader(limitReader, p)
|
||||
|
||||
req, err := http.NewRequest("PUT", uploadPartInfo.UploadUrl, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Type", "application/octet-stream")
|
||||
req.Header.Set("Content-Length", fmt.Sprint(partSize))
|
||||
req.Header.Set("Origin", "https://yun.139.com")
|
||||
req.Header.Set("Referer", "https://yun.139.com/")
|
||||
req.ContentLength = partSize
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = res.Body.Close()
|
||||
log.Debugf("[139] uploaded: %+v", res)
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||
}
|
||||
}
|
||||
|
||||
data = base.Json{
|
||||
"contentHash": fullHash,
|
||||
"contentHashAlgorithm": "SHA256",
|
||||
"fileId": resp.Data.FileId,
|
||||
"uploadId": resp.Data.UploadId,
|
||||
}
|
||||
_, err = d.personalPost("/file/complete", data, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Handle the name conflict
|
||||
if resp.Data.FileName != stream.GetName() {
|
||||
log.Debugf("[139] conflict detected: %s != %s", resp.Data.FileName, stream.GetName())
|
||||
// Give the server a moment to process the data, otherwise the file list may not refresh
|
||||
time.Sleep(time.Millisecond * 500)
|
||||
// Refresh and fetch the file list
|
||||
files, err := d.List(ctx, dstDir, model.ListArgs{Refresh: true})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Remove the old file
|
||||
for _, file := range files {
|
||||
if file.GetName() == stream.GetName() {
|
||||
log.Debugf("[139] conflict: removing old: %s", file.GetName())
|
||||
// Rename the old file before deleting it, to avoid another conflict
|
||||
err = d.Rename(ctx, file, stream.GetName()+random.String(4))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = d.Remove(ctx, file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
// Rename the new file
|
||||
for _, file := range files {
|
||||
if file.GetName() == resp.Data.FileName {
|
||||
log.Debugf("[139] conflict: renaming new: %s => %s", file.GetName(), stream.GetName())
|
||||
err = d.Rename(ctx, file, stream.GetName())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
case MetaPersonal:
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
// Handle name conflicts
|
||||
// Fetch the file list
|
||||
files, err := d.List(ctx, dstDir, model.ListArgs{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Remove the old file
|
||||
for _, file := range files {
|
||||
if file.GetName() == stream.GetName() {
|
||||
log.Debugf("[139] conflict: removing old: %s", file.GetName())
|
||||
// Rename the old file before deleting it, to avoid another conflict
|
||||
err = d.Rename(ctx, file, stream.GetName()+random.String(4))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = d.Remove(ctx, file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
var reportSize int64
|
||||
if d.ReportRealSize {
|
||||
reportSize = stream.GetSize()
|
||||
} else {
|
||||
reportSize = 0
|
||||
}
|
||||
data := base.Json{
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
"fileCount": 1,
|
||||
"totalSize": reportSize,
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": reportSize,
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
}},
|
||||
"parentCatalogID": dstDir.GetID(),
|
||||
"newCatalogName": "",
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
|
||||
if d.isFamily() {
|
||||
data = d.newJson(base.Json{
|
||||
"fileCount": 1,
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
"path": path.Join(dstDir.GetPath(), dstDir.GetID()),
|
||||
"seqNo": random.String(32), //序列号不能为空
|
||||
"totalSize": reportSize,
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": reportSize,
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
}},
|
||||
})
|
||||
pathname = "/orchestration/familyCloud-rebuild/content/v1.0/getFileUploadURL"
|
||||
}
|
||||
var resp UploadResp
|
||||
_, err = d.post(pathname, data, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Data.Result.ResultCode != "0" {
|
||||
return fmt.Errorf("get file upload url failed with result code: %s, message: %s", resp.Data.Result.ResultCode, resp.Data.Result.ResultDesc)
|
||||
}
|
||||
|
||||
size := stream.GetSize()
|
||||
// Progress
|
||||
p := driver.NewProgress(size, up)
|
||||
var partSize = d.getPartSize(size)
|
||||
part := size / partSize
|
||||
if size%partSize > 0 {
|
||||
part++
|
||||
} else if part == 0 {
|
||||
part = 1
|
||||
}
|
||||
rateLimited := driver.NewLimitedUploadStream(ctx, stream)
|
||||
for i := int64(0); i < part; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
|
||||
start := i * partSize
|
||||
byteSize := min(size-start, partSize)
|
||||
|
||||
limitReader := io.LimitReader(rateLimited, byteSize)
|
||||
// Update Progress
|
||||
r := io.TeeReader(limitReader, p)
|
||||
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
|
||||
req.Header.Set("contentSize", strconv.FormatInt(size, 10))
|
||||
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
|
||||
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
|
||||
req.Header.Set("rangeType", "0")
|
||||
req.ContentLength = byteSize
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
res.Body.Close()
|
||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||
}
|
||||
bodyBytes, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error reading response body: %v", err)
|
||||
}
|
||||
var result InterLayerUploadResult
|
||||
err = xml.Unmarshal(bodyBytes, &result)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error parsing XML: %v", err)
|
||||
}
|
||||
if result.ResultCode != 0 {
|
||||
return fmt.Errorf("upload failed with result code: %d, message: %s", result.ResultCode, result.Msg)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
var resp base.Json
|
||||
var uri string
|
||||
data := base.Json{
|
||||
"category": "video",
|
||||
"fileId": args.Obj.GetID(),
|
||||
}
|
||||
switch args.Method {
|
||||
case "video_preview":
|
||||
uri = "/videoPreview/getPreviewInfo"
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
_, err := d.personalPost(uri, data, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp["data"], nil
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Yun139)(nil)
|
||||
drivers/139/meta.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package _139

import (
    "github.com/OpenListTeam/OpenList/v4/internal/driver"
    "github.com/OpenListTeam/OpenList/v4/internal/op"
)

type Addition struct {
    //Account       string `json:"account" required:"true"`
    Authorization string `json:"authorization" type:"text" required:"true"`
    driver.RootID
    Type                 string `json:"type" type:"select" options:"personal_new,family,group,personal" default:"personal_new"`
    CloudID              string `json:"cloud_id"`
    CustomUploadPartSize int64  `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
    ReportRealSize       bool   `json:"report_real_size" type:"bool" default:"true" help:"Enable to report the real file size during upload"`
    UseLargeThumbnail    bool   `json:"use_large_thumbnail" type:"bool" default:"false" help:"Enable to use large thumbnail for images"`
}

var config = driver.Config{
    Name:             "139Yun",
    LocalSort:        true,
    ProxyRangeOption: true,
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        d := &Yun139{}
        d.ProxyRange = true
        return d
    })
}
drivers/139/types.go (new file, 314 lines)
@@ -0,0 +1,314 @@
package _139

import (
    "encoding/xml"
)

const (
    MetaPersonal    string = "personal"
    MetaFamily      string = "family"
    MetaGroup       string = "group"
    MetaPersonalNew string = "personal_new"
)

type BaseResp struct {
    Success bool   `json:"success"`
    Code    string `json:"code"`
    Message string `json:"message"`
}

type Catalog struct {
    CatalogID   string `json:"catalogID"`
    CatalogName string `json:"catalogName"`
    //CatalogType int `json:"catalogType"`
    CreateTime string `json:"createTime"`
    UpdateTime string `json:"updateTime"`
    //IsShared bool `json:"isShared"`
    //CatalogLevel int `json:"catalogLevel"`
    //ShareDoneeCount int `json:"shareDoneeCount"`
    //OpenType int `json:"openType"`
    //ParentCatalogID string `json:"parentCatalogId"`
    //DirEtag int `json:"dirEtag"`
    //Tombstoned int `json:"tombstoned"`
    //ProxyID interface{} `json:"proxyID"`
    //Moved int `json:"moved"`
    //IsFixedDir int `json:"isFixedDir"`
    //IsSynced interface{} `json:"isSynced"`
    //Owner string `json:"owner"`
    //Modifier interface{} `json:"modifier"`
    //Path string `json:"path"`
    //ShareType int `json:"shareType"`
    //SoftLink interface{} `json:"softLink"`
    //ExtProp1 interface{} `json:"extProp1"`
    //ExtProp2 interface{} `json:"extProp2"`
    //ExtProp3 interface{} `json:"extProp3"`
    //ExtProp4 interface{} `json:"extProp4"`
    //ExtProp5 interface{} `json:"extProp5"`
    //ETagOprType int `json:"ETagOprType"`
}

type Content struct {
    ContentID   string `json:"contentID"`
    ContentName string `json:"contentName"`
    //ContentSuffix string `json:"contentSuffix"`
    ContentSize int64 `json:"contentSize"`
    //ContentDesc string `json:"contentDesc"`
    //ContentType int `json:"contentType"`
    //ContentOrigin int `json:"contentOrigin"`
    CreateTime string `json:"createTime"`
    UpdateTime string `json:"updateTime"`
    //CommentCount int `json:"commentCount"`
    ThumbnailURL string `json:"thumbnailURL"`
    //BigthumbnailURL string `json:"bigthumbnailURL"`
    //PresentURL string `json:"presentURL"`
    //PresentLURL string `json:"presentLURL"`
    //PresentHURL string `json:"presentHURL"`
    //ContentTAGList interface{} `json:"contentTAGList"`
    //ShareDoneeCount int `json:"shareDoneeCount"`
    //Safestate int `json:"safestate"`
    //Transferstate int `json:"transferstate"`
    //IsFocusContent int `json:"isFocusContent"`
    //UpdateShareTime interface{} `json:"updateShareTime"`
    //UploadTime string `json:"uploadTime"`
    //OpenType int `json:"openType"`
    //AuditResult int `json:"auditResult"`
    //ParentCatalogID string `json:"parentCatalogId"`
    //Channel string `json:"channel"`
    //GeoLocFlag string `json:"geoLocFlag"`
    Digest string `json:"digest"`
    //Version string `json:"version"`
    //FileEtag string `json:"fileEtag"`
    //FileVersion string `json:"fileVersion"`
    //Tombstoned int `json:"tombstoned"`
    //ProxyID string `json:"proxyID"`
    //Moved int `json:"moved"`
    //MidthumbnailURL string `json:"midthumbnailURL"`
    //Owner string `json:"owner"`
    //Modifier string `json:"modifier"`
    //ShareType int `json:"shareType"`
    //ExtInfo struct {
    //	Uploader string `json:"uploader"`
    //	Address string `json:"address"`
    //} `json:"extInfo"`
    //Exif struct {
    //	CreateTime string `json:"createTime"`
    //	Longitude interface{} `json:"longitude"`
    //	Latitude interface{} `json:"latitude"`
    //	LocalSaveTime interface{} `json:"localSaveTime"`
    //} `json:"exif"`
    //CollectionFlag interface{} `json:"collectionFlag"`
    //TreeInfo interface{} `json:"treeInfo"`
    //IsShared bool `json:"isShared"`
    //ETagOprType int `json:"ETagOprType"`
}

type GetDiskResp struct {
    BaseResp
    Data struct {
        Result struct {
            ResultCode string      `json:"resultCode"`
            ResultDesc interface{} `json:"resultDesc"`
        } `json:"result"`
        GetDiskResult struct {
            ParentCatalogID string    `json:"parentCatalogID"`
            NodeCount       int       `json:"nodeCount"`
            CatalogList     []Catalog `json:"catalogList"`
            ContentList     []Content `json:"contentList"`
            IsCompleted     int       `json:"isCompleted"`
        } `json:"getDiskResult"`
    } `json:"data"`
}

type UploadResp struct {
    BaseResp
    Data struct {
        Result struct {
            ResultCode string      `json:"resultCode"`
            ResultDesc interface{} `json:"resultDesc"`
        } `json:"result"`
        UploadResult struct {
            UploadTaskID     string `json:"uploadTaskID"`
            RedirectionURL   string `json:"redirectionUrl"`
            NewContentIDList []struct {
                ContentID     string `json:"contentID"`
                ContentName   string `json:"contentName"`
                IsNeedUpload  string `json:"isNeedUpload"`
                FileEtag      int64  `json:"fileEtag"`
                FileVersion   int64  `json:"fileVersion"`
                OverridenFlag int    `json:"overridenFlag"`
            } `json:"newContentIDList"`
            CatalogIDList interface{} `json:"catalogIDList"`
            IsSlice       interface{} `json:"isSlice"`
        } `json:"uploadResult"`
    } `json:"data"`
}

type InterLayerUploadResult struct {
    XMLName    xml.Name `xml:"result"`
    Text       string   `xml:",chardata"`
    ResultCode int      `xml:"resultCode"`
    Msg        string   `xml:"msg"`
}

type CloudContent struct {
    ContentID string `json:"contentID"`
    //Modifier string `json:"modifier"`
    //Nickname string `json:"nickname"`
    //CloudNickName string `json:"cloudNickName"`
    ContentName string `json:"contentName"`
    //ContentType int `json:"contentType"`
    //ContentSuffix string `json:"contentSuffix"`
    ContentSize int64 `json:"contentSize"`
    //ContentDesc string `json:"contentDesc"`
    CreateTime string `json:"createTime"`
    //Shottime interface{} `json:"shottime"`
    LastUpdateTime string `json:"lastUpdateTime"`
    ThumbnailURL   string `json:"thumbnailURL"`
    //MidthumbnailURL string `json:"midthumbnailURL"`
    //BigthumbnailURL string `json:"bigthumbnailURL"`
    //PresentURL string `json:"presentURL"`
    //PresentLURL string `json:"presentLURL"`
    //PresentHURL string `json:"presentHURL"`
    //ParentCatalogID string `json:"parentCatalogID"`
    //Uploader string `json:"uploader"`
    //UploaderNickName string `json:"uploaderNickName"`
    //TreeInfo interface{} `json:"treeInfo"`
    //UpdateTime interface{} `json:"updateTime"`
    //ExtInfo struct {
    //	Uploader string `json:"uploader"`
    //} `json:"extInfo"`
    //EtagOprType interface{} `json:"etagOprType"`
}

type CloudCatalog struct {
    CatalogID   string `json:"catalogID"`
    CatalogName string `json:"catalogName"`
    //CloudID string `json:"cloudID"`
    CreateTime     string `json:"createTime"`
    LastUpdateTime string `json:"lastUpdateTime"`
    //Creator string `json:"creator"`
    //CreatorNickname string `json:"creatorNickname"`
}

type QueryContentListResp struct {
    BaseResp
    Data struct {
        Result struct {
            ResultCode string `json:"resultCode"`
            ResultDesc string `json:"resultDesc"`
        } `json:"result"`
        Path             string         `json:"path"`
        CloudContentList []CloudContent `json:"cloudContentList"`
        CloudCatalogList []CloudCatalog `json:"cloudCatalogList"`
        TotalCount       int            `json:"totalCount"`
        RecallContent    interface{}    `json:"recallContent"`
    } `json:"data"`
}

type QueryGroupContentListResp struct {
    BaseResp
    Data struct {
        Result struct {
            ResultCode string `json:"resultCode"`
            ResultDesc string `json:"resultDesc"`
        } `json:"result"`
        GetGroupContentResult struct {
            ParentCatalogID string `json:"parentCatalogID"` // the root directory is "0"
            CatalogList     []struct {
                Catalog
                Path string `json:"path"`
            } `json:"catalogList"`
            ContentList []Content `json:"contentList"`
            NodeCount   int       `json:"nodeCount"` // number of files + folders
            CtlgCnt     int       `json:"ctlgCnt"`   // number of folders
            ContCnt     int       `json:"contCnt"`   // number of files
        } `json:"getGroupContentResult"`
    } `json:"data"`
}

type ParallelHashCtx struct {
    PartOffset int64 `json:"partOffset"`
}

type PartInfo struct {
    PartNumber      int64           `json:"partNumber"`
    PartSize        int64           `json:"partSize"`
    ParallelHashCtx ParallelHashCtx `json:"parallelHashCtx"`
}

type PersonalThumbnail struct {
    Style string `json:"style"`
    Url   string `json:"url"`
}

type PersonalFileItem struct {
    FileId     string              `json:"fileId"`
    Name       string              `json:"name"`
    Size       int64               `json:"size"`
    Type       string              `json:"type"`
    CreatedAt  string              `json:"createdAt"`
    UpdatedAt  string              `json:"updatedAt"`
    Thumbnails []PersonalThumbnail `json:"thumbnailUrls"`
}

type PersonalListResp struct {
    BaseResp
    Data struct {
        Items          []PersonalFileItem `json:"items"`
        NextPageCursor string             `json:"nextPageCursor"`
    }
}

type PersonalPartInfo struct {
    PartNumber int    `json:"partNumber"`
    UploadUrl  string `json:"uploadUrl"`
}

type PersonalUploadResp struct {
    BaseResp
    Data struct {
        FileId      string             `json:"fileId"`
        FileName    string             `json:"fileName"`
        PartInfos   []PersonalPartInfo `json:"partInfos"`
        Exist       bool               `json:"exist"`
        RapidUpload bool               `json:"rapidUpload"`
        UploadId    string             `json:"uploadId"`
    }
}

type PersonalUploadUrlResp struct {
    BaseResp
    Data struct {
        FileId    string             `json:"fileId"`
        UploadId  string             `json:"uploadId"`
        PartInfos []PersonalPartInfo `json:"partInfos"`
    }
}

type QueryRoutePolicyResp struct {
    Success bool   `json:"success"`
    Code    string `json:"code"`
    Message string `json:"message"`
    Data    struct {
        RoutePolicyList []struct {
            SiteID      string `json:"siteID"`
            SiteCode    string `json:"siteCode"`
            ModName     string `json:"modName"`
            HttpUrl     string `json:"httpUrl"`
            HttpsUrl    string `json:"httpsUrl"`
            EnvID       string `json:"envID"`
            ExtInfo     string `json:"extInfo"`
            HashName    string `json:"hashName"`
            ModAddrType int    `json:"modAddrType"`
        } `json:"routePolicyList"`
    } `json:"data"`
}

type RefreshTokenResp struct {
    XMLName     xml.Name `xml:"root"`
    Return      string   `xml:"return"`
    Token       string   `xml:"token"`
    Expiretime  int32    `xml:"expiretime"`
    AccessToken string   `xml:"accessToken"`
    Desc        string   `xml:"desc"`
}
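The response types above follow one pattern: BaseResp is embedded so that success/code/message are promoted onto every response, and the payload proper sits under a nested Data struct. Below is a minimal, self-contained sketch of decoding a hypothetical getDisk payload using trimmed-down mirror types; the JSON sample and the Sketch* names are illustrative only, not part of the driver.

package main

import (
    "encoding/json"
    "fmt"
)

// Trimmed-down mirrors of BaseResp and GetDiskResp, kept only for this sketch.
type SketchBaseResp struct {
    Success bool   `json:"success"`
    Code    string `json:"code"`
    Message string `json:"message"`
}

type SketchGetDiskResp struct {
    SketchBaseResp
    Data struct {
        GetDiskResult struct {
            NodeCount   int `json:"nodeCount"`
            CatalogList []struct {
                CatalogID   string `json:"catalogID"`
                CatalogName string `json:"catalogName"`
            } `json:"catalogList"`
        } `json:"getDiskResult"`
    } `json:"data"`
}

func main() {
    // Hypothetical payload shaped like a getDisk response.
    payload := []byte(`{"success":true,"code":"0","message":"ok",
        "data":{"getDiskResult":{"nodeCount":1,
        "catalogList":[{"catalogID":"abc","catalogName":"Documents"}]}}}`)

    var resp SketchGetDiskResp
    if err := json.Unmarshal(payload, &resp); err != nil {
        panic(err)
    }
    // The embedded base struct promotes Success/Code/Message onto the response.
    fmt.Println(resp.Success, resp.Data.GetDiskResult.CatalogList[0].CatalogName)
}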
625  drivers/139/util.go  Normal file
@@ -0,0 +1,625 @@
package _139

import (
    "encoding/base64"
    "errors"
    "fmt"
    "net/http"
    "net/url"
    "path"
    "sort"
    "strconv"
    "strings"
    "time"

    "github.com/OpenListTeam/OpenList/v4/drivers/base"
    "github.com/OpenListTeam/OpenList/v4/internal/model"
    "github.com/OpenListTeam/OpenList/v4/internal/op"
    "github.com/OpenListTeam/OpenList/v4/pkg/utils"
    "github.com/OpenListTeam/OpenList/v4/pkg/utils/random"
    "github.com/go-resty/resty/v2"
    jsoniter "github.com/json-iterator/go"
    log "github.com/sirupsen/logrus"
)

// helpers that are not part of the Driver interface
func (d *Yun139) isFamily() bool {
    return d.Type == "family"
}

func encodeURIComponent(str string) string {
    r := url.QueryEscape(str)
    r = strings.Replace(r, "+", "%20", -1)
    r = strings.Replace(r, "%21", "!", -1)
    r = strings.Replace(r, "%27", "'", -1)
    r = strings.Replace(r, "%28", "(", -1)
    r = strings.Replace(r, "%29", ")", -1)
    r = strings.Replace(r, "%2A", "*", -1)
    return r
}

func calSign(body, ts, randStr string) string {
    body = encodeURIComponent(body)
    strs := strings.Split(body, "")
    sort.Strings(strs)
    body = strings.Join(strs, "")
    body = base64.StdEncoding.EncodeToString([]byte(body))
    res := utils.GetMD5EncodeStr(body) + utils.GetMD5EncodeStr(ts+":"+randStr)
    res = strings.ToUpper(utils.GetMD5EncodeStr(res))
    return res
}
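calSign above is the request signature used by the yun.139.com endpoints: percent-encode the JSON body, sort its characters, base64-encode the result, concatenate two MD5 digests, then take an upper-cased final MD5. A self-contained sketch of the same recipe with hypothetical inputs, assuming utils.GetMD5EncodeStr is a lower-case hex MD5:

package main

import (
    "crypto/md5"
    "encoding/base64"
    "fmt"
    "net/url"
    "sort"
    "strings"
)

// md5Hex stands in for the assumed behaviour of utils.GetMD5EncodeStr.
func md5Hex(s string) string {
    return fmt.Sprintf("%x", md5.Sum([]byte(s)))
}

func main() {
    // Hypothetical inputs; the driver uses the JSON request body,
    // the current timestamp, and a 16-character random string.
    body, ts, randStr := `{"catalogID":"root"}`, "2006-01-02 15:04:05", "abcdefgh12345678"

    // encodeURIComponent-style escaping, as in the helper above.
    escaped := url.QueryEscape(body)
    escaped = strings.NewReplacer("+", "%20", "%21", "!", "%27", "'",
        "%28", "(", "%29", ")", "%2A", "*").Replace(escaped)

    // Sort the characters of the escaped body and base64-encode the result.
    chars := strings.Split(escaped, "")
    sort.Strings(chars)
    b64 := base64.StdEncoding.EncodeToString([]byte(strings.Join(chars, "")))

    // sign = UPPER(MD5(MD5(b64) + MD5(ts + ":" + randStr)))
    sign := strings.ToUpper(md5Hex(md5Hex(b64) + md5Hex(ts+":"+randStr)))
    fmt.Println(sign)
}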
func getTime(t string) time.Time {
    stamp, _ := time.ParseInLocation("20060102150405", t, utils.CNLoc)
    return stamp
}

func (d *Yun139) refreshToken() error {
    if d.ref != nil {
        return d.ref.refreshToken()
    }
    decode, err := base64.StdEncoding.DecodeString(d.Authorization)
    if err != nil {
        return fmt.Errorf("authorization decode failed: %s", err)
    }
    decodeStr := string(decode)
    splits := strings.Split(decodeStr, ":")
    if len(splits) < 3 {
        return fmt.Errorf("authorization is invalid, splits < 3")
    }
    d.Account = splits[1]
    strs := strings.Split(splits[2], "|")
    if len(strs) < 4 {
        return fmt.Errorf("authorization is invalid, strs < 4")
    }
    expiration, err := strconv.ParseInt(strs[3], 10, 64)
    if err != nil {
        return fmt.Errorf("authorization is invalid")
    }
    expiration -= time.Now().UnixMilli()
    if expiration > 1000*60*60*24*15 {
        // No refresh needed while the Authorization is still valid for more than 15 days
        return nil
    }
    if expiration < 0 {
        return fmt.Errorf("authorization has expired")
    }

    url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
    var resp RefreshTokenResp
    reqBody := "<root><token>" + splits[2] + "</token><account>" + splits[1] + "</account><clienttype>656</clienttype></root>"
    _, err = base.RestyClient.R().
        ForceContentType("application/xml").
        SetBody(reqBody).
        SetResult(&resp).
        Post(url)
    if err != nil {
        return err
    }
    if resp.Return != "0" {
        return fmt.Errorf("failed to refresh token: %s", resp.Desc)
    }
    d.Authorization = base64.StdEncoding.EncodeToString([]byte(splits[0] + ":" + splits[1] + ":" + resp.Token))
    op.MustSaveDriverStorage(d)
    return nil
}
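refreshToken expects the stored Authorization to be a base64 blob of the form prefix:account:token, where the token part is pipe-separated and its fourth field is an expiry timestamp in milliseconds. A minimal sketch that builds and parses such a blob the same way; every concrete value here is hypothetical.

package main

import (
    "encoding/base64"
    "fmt"
    "strconv"
    "strings"
    "time"
)

func main() {
    // Build a hypothetical Authorization blob with an expiry one hour from now.
    expiry := time.Now().Add(time.Hour).UnixMilli()
    token := fmt.Sprintf("p|q|r|%d", expiry)
    auth := base64.StdEncoding.EncodeToString([]byte("prefix:13800000000:" + token))

    // Decode it the same way refreshToken does.
    raw, _ := base64.StdEncoding.DecodeString(auth)
    parts := strings.Split(string(raw), ":") // prefix, account, token
    fields := strings.Split(parts[2], "|")
    ms, _ := strconv.ParseInt(fields[3], 10, 64)
    fmt.Println("account:", parts[1], "expires in:", time.Until(time.UnixMilli(ms)))
}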
func (d *Yun139) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    url := "https://yun.139.com" + pathname
    req := base.RestyClient.R()
    randStr := random.String(16)
    ts := time.Now().Format("2006-01-02 15:04:05")
    if callback != nil {
        callback(req)
    }
    body, err := utils.Json.Marshal(req.Body)
    if err != nil {
        return nil, err
    }
    sign := calSign(string(body), ts, randStr)
    svcType := "1"
    if d.isFamily() {
        svcType = "2"
    }
    req.SetHeaders(map[string]string{
        "Accept":         "application/json, text/plain, */*",
        "CMS-DEVICE":     "default",
        "Authorization":  "Basic " + d.getAuthorization(),
        "mcloud-channel": "1000101",
        "mcloud-client":  "10701",
        //"mcloud-route": "001",
        "mcloud-sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
        //"mcloud-skey":"",
        "mcloud-version":         "7.14.0",
        "Origin":                 "https://yun.139.com",
        "Referer":                "https://yun.139.com/w/",
        "x-DeviceInfo":           "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
        "x-huawei-channelSrc":    "10000034",
        "x-inner-ntwk":           "2",
        "x-m4c-caller":           "PC",
        "x-m4c-src":              "10002",
        "x-SvcType":              svcType,
        "Inner-Hcy-Router-Https": "1",
    })

    var e BaseResp
    req.SetResult(&e)
    res, err := req.Execute(method, url)
    if err != nil {
        return nil, err
    }
    log.Debugln(res.String())
    if !e.Success {
        return nil, errors.New(e.Message)
    }
    if resp != nil {
        err = utils.Json.Unmarshal(res.Body(), resp)
        if err != nil {
            return nil, err
        }
    }
    return res.Body(), nil
}

func (d *Yun139) requestRoute(data interface{}, resp interface{}) ([]byte, error) {
    url := "https://user-njs.yun.139.com/user/route/qryRoutePolicy"
    req := base.RestyClient.R()
    randStr := random.String(16)
    ts := time.Now().Format("2006-01-02 15:04:05")
    req.SetBody(data)
    body, err := utils.Json.Marshal(req.Body)
    if err != nil {
        return nil, err
    }
    sign := calSign(string(body), ts, randStr)
    svcType := "1"
    if d.isFamily() {
        svcType = "2"
    }
    req.SetHeaders(map[string]string{
        "Accept":         "application/json, text/plain, */*",
        "CMS-DEVICE":     "default",
        "Authorization":  "Basic " + d.getAuthorization(),
        "mcloud-channel": "1000101",
        "mcloud-client":  "10701",
        //"mcloud-route": "001",
        "mcloud-sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
        //"mcloud-skey":"",
        "mcloud-version":         "7.14.0",
        "Origin":                 "https://yun.139.com",
        "Referer":                "https://yun.139.com/w/",
        "x-DeviceInfo":           "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
        "x-huawei-channelSrc":    "10000034",
        "x-inner-ntwk":           "2",
        "x-m4c-caller":           "PC",
        "x-m4c-src":              "10002",
        "x-SvcType":              svcType,
        "Inner-Hcy-Router-Https": "1",
    })

    var e BaseResp
    req.SetResult(&e)
    res, err := req.Execute(http.MethodPost, url)
    if err != nil {
        return nil, err
    }
    log.Debugln(res.String())
    if !e.Success {
        return nil, errors.New(e.Message)
    }
    if resp != nil {
        err = utils.Json.Unmarshal(res.Body(), resp)
        if err != nil {
            return nil, err
        }
    }
    return res.Body(), nil
}

func (d *Yun139) post(pathname string, data interface{}, resp interface{}) ([]byte, error) {
    return d.request(pathname, http.MethodPost, func(req *resty.Request) {
        req.SetBody(data)
    }, resp)
}

func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
    start := 0
    limit := 100
    files := make([]model.Obj, 0)
    for {
        data := base.Json{
            "catalogID":       catalogID,
            "sortDirection":   1,
            "startNumber":     start + 1,
            "endNumber":       start + limit,
            "filterType":      0,
            "catalogSortType": 0,
            "contentSortType": 0,
            "commonAccountInfo": base.Json{
                "account":     d.getAccount(),
                "accountType": 1,
            },
        }
        var resp GetDiskResp
        _, err := d.post("/orchestration/personalCloud/catalog/v1.0/getDisk", data, &resp)
        if err != nil {
            return nil, err
        }
        for _, catalog := range resp.Data.GetDiskResult.CatalogList {
            f := model.Object{
                ID:       catalog.CatalogID,
                Name:     catalog.CatalogName,
                Size:     0,
                Modified: getTime(catalog.UpdateTime),
                Ctime:    getTime(catalog.CreateTime),
                IsFolder: true,
            }
            files = append(files, &f)
        }
        for _, content := range resp.Data.GetDiskResult.ContentList {
            f := model.ObjThumb{
                Object: model.Object{
                    ID:       content.ContentID,
                    Name:     content.ContentName,
                    Size:     content.ContentSize,
                    Modified: getTime(content.UpdateTime),
                    HashInfo: utils.NewHashInfo(utils.MD5, content.Digest),
                },
                Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
                //Thumbnail: content.BigthumbnailURL,
            }
            files = append(files, &f)
        }
        if start+limit >= resp.Data.GetDiskResult.NodeCount {
            break
        }
        start += limit
    }
    return files, nil
}
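getFiles above pages the personal cloud by absolute 1-based indexes (startNumber/endNumber) and stops once the window reaches nodeCount. A tiny sketch of that window arithmetic with a hypothetical nodeCount:

package main

import "fmt"

func main() {
    const limit = 100
    nodeCount := 235 // hypothetical total reported by getDisk

    start := 0
    for {
        startNumber, endNumber := start+1, start+limit
        fmt.Printf("requesting items %d..%d\n", startNumber, endNumber)
        if start+limit >= nodeCount {
            break
        }
        start += limit
    }
    // For nodeCount = 235 the loop requests windows 1..100, 101..200, 201..300.
}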
func (d *Yun139) newJson(data map[string]interface{}) base.Json {
    common := map[string]interface{}{
        "catalogType": 3,
        "cloudID":     d.CloudID,
        "cloudType":   1,
        "commonAccountInfo": base.Json{
            "account":     d.getAccount(),
            "accountType": 1,
        },
    }
    return utils.MergeMap(data, common)
}

func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
    pageNum := 1
    files := make([]model.Obj, 0)
    for {
        data := d.newJson(base.Json{
            "catalogID":       catalogID,
            "contentSortType": 0,
            "pageInfo": base.Json{
                "pageNum":  pageNum,
                "pageSize": 100,
            },
            "sortDirection": 1,
        })
        var resp QueryContentListResp
        _, err := d.post("/orchestration/familyCloud-rebuild/content/v1.2/queryContentList", data, &resp)
        if err != nil {
            return nil, err
        }
        path := resp.Data.Path
        for _, catalog := range resp.Data.CloudCatalogList {
            f := model.Object{
                ID:       catalog.CatalogID,
                Name:     catalog.CatalogName,
                Size:     0,
                IsFolder: true,
                Modified: getTime(catalog.LastUpdateTime),
                Ctime:    getTime(catalog.CreateTime),
                Path:     path, // Path of the folder's parent directory
            }
            files = append(files, &f)
        }
        for _, content := range resp.Data.CloudContentList {
            f := model.ObjThumb{
                Object: model.Object{
                    ID:       content.ContentID,
                    Name:     content.ContentName,
                    Size:     content.ContentSize,
                    Modified: getTime(content.LastUpdateTime),
                    Ctime:    getTime(content.CreateTime),
                    Path:     path, // Path of the directory containing the file
                },
                Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
                //Thumbnail: content.BigthumbnailURL,
            }
            files = append(files, &f)
        }
        if resp.Data.TotalCount == 0 {
            break
        }
        pageNum++
    }
    return files, nil
}

func (d *Yun139) groupGetFiles(catalogID string) ([]model.Obj, error) {
    pageNum := 1
    files := make([]model.Obj, 0)
    for {
        data := d.newJson(base.Json{
            "groupID":         d.CloudID,
            "catalogID":       path.Base(catalogID),
            "contentSortType": 0,
            "sortDirection":   1,
            "startNumber":     pageNum,
            "endNumber":       pageNum + 99,
            "path":            path.Join(d.RootFolderID, catalogID),
        })

        var resp QueryGroupContentListResp
        _, err := d.post("/orchestration/group-rebuild/content/v1.0/queryGroupContentList", data, &resp)
        if err != nil {
            return nil, err
        }
        path := resp.Data.GetGroupContentResult.ParentCatalogID
        for _, catalog := range resp.Data.GetGroupContentResult.CatalogList {
            f := model.Object{
                ID:       catalog.CatalogID,
                Name:     catalog.CatalogName,
                Size:     0,
                IsFolder: true,
                Modified: getTime(catalog.UpdateTime),
                Ctime:    getTime(catalog.CreateTime),
                Path:     catalog.Path, // the folder's real Path, starting with root:/
            }
            files = append(files, &f)
        }
        for _, content := range resp.Data.GetGroupContentResult.ContentList {
            f := model.ObjThumb{
                Object: model.Object{
                    ID:       content.ContentID,
                    Name:     content.ContentName,
                    Size:     content.ContentSize,
                    Modified: getTime(content.UpdateTime),
                    Ctime:    getTime(content.CreateTime),
                    Path:     path, // Path of the directory containing the file
                },
                Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
                //Thumbnail: content.BigthumbnailURL,
            }
            files = append(files, &f)
        }
        if (pageNum + 99) > resp.Data.GetGroupContentResult.NodeCount {
            break
        }
        pageNum = pageNum + 100
    }
    return files, nil
}

func (d *Yun139) getLink(contentId string) (string, error) {
    data := base.Json{
        "appName":   "",
        "contentID": contentId,
        "commonAccountInfo": base.Json{
            "account":     d.getAccount(),
            "accountType": 1,
        },
    }
    res, err := d.post("/orchestration/personalCloud/uploadAndDownload/v1.0/downloadRequest",
        data, nil)
    if err != nil {
        return "", err
    }
    return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
}

func (d *Yun139) familyGetLink(contentId string, path string) (string, error) {
    data := d.newJson(base.Json{
        "contentID": contentId,
        "path":      path,
    })
    res, err := d.post("/orchestration/familyCloud-rebuild/content/v1.0/getFileDownLoadURL",
        data, nil)
    if err != nil {
        return "", err
    }
    return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
}

func (d *Yun139) groupGetLink(contentId string, path string) (string, error) {
    data := d.newJson(base.Json{
        "contentID": contentId,
        "groupID":   d.CloudID,
        "path":      path,
    })
    res, err := d.post("/orchestration/group-rebuild/groupManage/v1.0/getGroupFileDownLoadURL",
        data, nil)
    if err != nil {
        return "", err
    }
    return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
}

func unicode(str string) string {
    textQuoted := strconv.QuoteToASCII(str)
    textUnquoted := textQuoted[1 : len(textQuoted)-1]
    return textUnquoted
}

func (d *Yun139) personalRequest(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    url := d.getPersonalCloudHost() + pathname
    req := base.RestyClient.R()
    randStr := random.String(16)
    ts := time.Now().Format("2006-01-02 15:04:05")
    if callback != nil {
        callback(req)
    }
    body, err := utils.Json.Marshal(req.Body)
    if err != nil {
        return nil, err
    }
    sign := calSign(string(body), ts, randStr)
    svcType := "1"
    if d.isFamily() {
        svcType = "2"
    }
    req.SetHeaders(map[string]string{
        "Accept":               "application/json, text/plain, */*",
        "Authorization":        "Basic " + d.getAuthorization(),
        "Caller":               "web",
        "Cms-Device":           "default",
        "Mcloud-Channel":       "1000101",
        "Mcloud-Client":        "10701",
        "Mcloud-Route":         "001",
        "Mcloud-Sign":          fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
        "Mcloud-Version":       "7.14.0",
        "x-DeviceInfo":         "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
        "x-huawei-channelSrc":  "10000034",
        "x-inner-ntwk":         "2",
        "x-m4c-caller":         "PC",
        "x-m4c-src":            "10002",
        "x-SvcType":            svcType,
        "X-Yun-Api-Version":    "v1",
        "X-Yun-App-Channel":    "10000034",
        "X-Yun-Channel-Source": "10000034",
        "X-Yun-Client-Info":    "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
        "X-Yun-Module-Type":    "100",
        "X-Yun-Svc-Type":       "1",
    })

    var e BaseResp
    req.SetResult(&e)
    res, err := req.Execute(method, url)
    if err != nil {
        return nil, err
    }
    log.Debugln(res.String())
    if !e.Success {
        return nil, errors.New(e.Message)
    }
    if resp != nil {
        err = utils.Json.Unmarshal(res.Body(), resp)
        if err != nil {
            return nil, err
        }
    }
    return res.Body(), nil
}

func (d *Yun139) personalPost(pathname string, data interface{}, resp interface{}) ([]byte, error) {
    return d.personalRequest(pathname, http.MethodPost, func(req *resty.Request) {
        req.SetBody(data)
    }, resp)
}

func getPersonalTime(t string) time.Time {
    stamp, err := time.ParseInLocation("2006-01-02T15:04:05.999-07:00", t, utils.CNLoc)
    if err != nil {
        panic(err)
    }
    return stamp
}

func (d *Yun139) personalGetFiles(fileId string) ([]model.Obj, error) {
    files := make([]model.Obj, 0)
    nextPageCursor := ""
    for {
        data := base.Json{
            "imageThumbnailStyleList": []string{"Small", "Large"},
            "orderBy":                 "updated_at",
            "orderDirection":          "DESC",
            "pageInfo": base.Json{
                "pageCursor": nextPageCursor,
                "pageSize":   100,
            },
            "parentFileId": fileId,
        }
        var resp PersonalListResp
        _, err := d.personalPost("/file/list", data, &resp)
        if err != nil {
            return nil, err
        }
        nextPageCursor = resp.Data.NextPageCursor
        for _, item := range resp.Data.Items {
            var isFolder = (item.Type == "folder")
            var f model.Obj
            if isFolder {
                f = &model.Object{
                    ID:       item.FileId,
                    Name:     item.Name,
                    Size:     0,
                    Modified: getPersonalTime(item.UpdatedAt),
                    Ctime:    getPersonalTime(item.CreatedAt),
                    IsFolder: isFolder,
                }
            } else {
                var Thumbnails = item.Thumbnails
                var ThumbnailUrl string
                if d.UseLargeThumbnail {
                    for _, thumb := range Thumbnails {
                        if strings.Contains(thumb.Style, "Large") {
                            ThumbnailUrl = thumb.Url
                            break
                        }
                    }
                }
                if ThumbnailUrl == "" && len(Thumbnails) > 0 {
                    ThumbnailUrl = Thumbnails[len(Thumbnails)-1].Url
                }
                f = &model.ObjThumb{
                    Object: model.Object{
                        ID:       item.FileId,
                        Name:     item.Name,
                        Size:     item.Size,
                        Modified: getPersonalTime(item.UpdatedAt),
                        Ctime:    getPersonalTime(item.CreatedAt),
                        IsFolder: isFolder,
                    },
                    Thumbnail: model.Thumbnail{Thumbnail: ThumbnailUrl},
                }
            }
            files = append(files, f)
        }
        if len(nextPageCursor) == 0 {
            break
        }
    }
    return files, nil
}
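personalGetFiles pages the newer personal API with an opaque cursor rather than index windows. A minimal sketch of the same loop shape, with a stub standing in for the /file/list call; the stubbed cursors and file names are invented for illustration.

package main

import "fmt"

// listPage is a stand-in for the /file/list call: it returns one page of
// item names plus the cursor for the next page ("" when there is no next page).
func listPage(cursor string) ([]string, string) {
    pages := map[string]struct {
        items []string
        next  string
    }{
        "":         {[]string{"a.txt", "b.jpg"}, "cursor-2"},
        "cursor-2": {[]string{"c.mp4"}, ""},
    }
    p := pages[cursor]
    return p.items, p.next
}

func main() {
    var all []string
    cursor := ""
    for {
        items, next := listPage(cursor)
        all = append(all, items...)
        cursor = next
        if len(cursor) == 0 { // same stop condition as personalGetFiles
            break
        }
    }
    fmt.Println(all)
}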
func (d *Yun139) personalGetLink(fileId string) (string, error) {
    data := base.Json{
        "fileId": fileId,
    }
    res, err := d.personalPost("/file/getDownloadUrl",
        data, nil)
    if err != nil {
        return "", err
    }
    var cdnUrl = jsoniter.Get(res, "data", "cdnUrl").ToString()
    if cdnUrl != "" {
        return cdnUrl, nil
    } else {
        return jsoniter.Get(res, "data", "url").ToString(), nil
    }
}

func (d *Yun139) getAuthorization() string {
    if d.ref != nil {
        return d.ref.getAuthorization()
    }
    return d.Authorization
}

func (d *Yun139) getAccount() string {
    if d.ref != nil {
        return d.ref.getAccount()
    }
    return d.Account
}

func (d *Yun139) getPersonalCloudHost() string {
    if d.ref != nil {
        return d.ref.getPersonalCloudHost()
    }
    return d.PersonalCloudHost
}
Some files were not shown because too many files have changed in this diff.