Compare commits

...

5 Commits

Author SHA1 Message Date
千石
3cddb6b7ed fix(driver): Handle Lanzou anti-crawler challenge by recalculating cookies (#9364)
- Detect and solve `acw_sc__v2` challenge to bypass anti-crawler validation
- Refactored request header initialization logic for clarity
2025-11-11 20:27:20 +08:00
千石
ce41587095 feat(cloud189): Added sanitization for file and folder names (#9366)
- Introduced `sanitizeName` function to remove four-byte characters (e.g., emojis) from names before upload or creation.
- Added `StripEmoji` option in driver configurations for cloud189 and cloud189pc.
- Updated file and folder operations (upload, rename, and creation) to use sanitized names.
- Ensured compatibility with both cloud189 and cloud189pc implementations.
2025-11-11 20:26:51 +08:00
千石
0cbc7ebc92 feat(driver): Added support for Gitee driver (#9368)
* feat(driver): Added support for Gitee driver

- Implemented core driver functions including initialization, file listing, and file linking
- Added Gitee-specific API interaction and object mapping
- Registered Gitee driver in the driver registry

* feat(driver): Added cookie-based authentication support for Gitee driver

- Extended request handling to include `Cookie` header if provided
- Updated metadata to include `cookie` field with appropriate documentation
- Adjusted file link generation to propagate `Cookie` headers in requests
2025-11-11 20:25:26 +08:00
千石
b4d9beb49c fix(Mediatrack): Add support for X-Device-Fingerprint header (#9354)
Introduce a `DeviceFingerprint` field to the request metadata.
This field is used to conditionally set the `X-Device-Fingerprint`
HTTP header in outgoing requests if its value is not empty.
2025-10-24 00:31:15 +08:00
千石
4c8401855c feat: Add new driver bitqiu support (#9355)
* feat(bitqiu): Add Bitqiu cloud drive support

- Implement the new Bitqiu cloud drive.
- Add core driver logic, metadata handling, and utility functions.
- Register the Bitqiu driver for use.

* feat(driver): Implement GetLink, CreateDir, and Move operations

- Implement `GetLink` method to retrieve download links for files.
- Implement `CreateDir` method to create new directories.
- Implement `Move` method to relocate files and directories.
- Add new API endpoints and data structures for download and directory creation responses.
- Integrate retry logic with re-authentication for API calls in implemented methods.
- Update HTTP request headers to include `x-requested-with`.

* feat(bitqiu): Add rename, copy, and delete operations

- Implement `Rename` operation with retry logic and API calls.
- Implement `Copy` operation, including asynchronous handling, polling for completion, and status checks.
- Implement `Remove` operation with retry logic and API calls.
- Add new API endpoint URLs for rename, copy, and delete, and a new copy success code.
- Introduce `AsyncManagerData`, `AsyncTask`, and `AsyncTaskInfo` types to support async copy status monitoring.
- Add utility functions `updateObjectName` and `parentPathOf` for object manipulation.
- Integrate login retry mechanism for all file operations.

* feat(bitqiu-upload): Implement chunked file upload support

- Implement multi-part chunked upload logic for the BitQiu service.
- Introduce `UploadInitData` and `ChunkUploadResponse` structs for structured API communication.
- Refactor the `Save` method to orchestrate initial upload, chunked data transfer, and finalization.
- Add `uploadFileInChunks` function to handle sequential uploading of file parts.
- Add `completeChunkUpload` function to finalize the chunked upload process on the server.
- Ensure proper temporary file cleanup using `defer tmpFile.Close()`.

* feat(driver): Implement automatic root folder ID retrieval

- Add `userInfoURL` constant for fetching user information.
- Implement `ensureRootFolderID` function to retrieve and set the driver's root folder ID if not already present.
- Integrate `ensureRootFolderID` into the driver's `Init` process.
- Define `UserInfoData` struct to parse the `rootDirId` from user information responses.

* feat(client): Implement configurable user agent

- Introduce a configurable `UserAgent` field in the client's settings.
- Add a `userAgent()` method that returns the custom user agent if set, falling back to a predefined default.
- Apply the determined user agent to all outbound HTTP requests made by the `BitQiu` client.
2025-10-24 00:29:33 +08:00
18 changed files with 1478 additions and 23 deletions

View File

@@ -80,9 +80,10 @@ func (d *Cloud189) Link(ctx context.Context, file model.Obj, args model.LinkArgs
}
func (d *Cloud189) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
safeName := d.sanitizeName(dirName)
form := map[string]string{
"parentFolderId": parentDir.GetID(),
"folderName": dirName,
"folderName": safeName,
}
_, err := d.request("https://cloud.189.cn/api/open/file/createFolder.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
@@ -126,9 +127,10 @@ func (d *Cloud189) Rename(ctx context.Context, srcObj model.Obj, newName string)
idKey = "folderId"
nameKey = "destFolderName"
}
safeName := d.sanitizeName(newName)
form := map[string]string{
idKey: srcObj.GetID(),
nameKey: newName,
nameKey: safeName,
}
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)

View File

@@ -6,9 +6,10 @@ import (
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
Cookie string `json:"cookie" help:"Fill in the cookie if need captcha"`
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
Cookie string `json:"cookie" help:"Fill in the cookie if need captcha"`
StripEmoji bool `json:"strip_emoji" help:"Remove four-byte characters (e.g., emoji) before upload"`
driver.RootID
}

View File

@@ -11,9 +11,11 @@ import (
"io"
"math"
"net/http"
"path"
"strconv"
"strings"
"time"
"unicode/utf8"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
@@ -222,13 +224,37 @@ func (d *Cloud189) getFiles(fileId string) ([]model.Obj, error) {
return res, nil
}
func (d *Cloud189) sanitizeName(name string) string {
if !d.StripEmoji {
return name
}
b := strings.Builder{}
for _, r := range name {
if utf8.RuneLen(r) == 4 {
continue
}
b.WriteRune(r)
}
sanitized := b.String()
if sanitized == "" {
ext := path.Ext(name)
if ext != "" {
sanitized = "file" + ext
} else {
sanitized = "file"
}
}
return sanitized
}
func (d *Cloud189) oldUpload(dstDir model.Obj, file model.FileStreamer) error {
safeName := d.sanitizeName(file.GetName())
res, err := d.client.R().SetMultipartFormData(map[string]string{
"parentId": dstDir.GetID(),
"sessionKey": "??",
"opertype": "1",
"fname": file.GetName(),
}).SetMultipartField("Filedata", file.GetName(), file.GetMimetype(), file).Post("https://hb02.upload.cloud.189.cn/v1/DCIWebUploadAction")
"fname": safeName,
}).SetMultipartField("Filedata", safeName, file.GetMimetype(), file).Post("https://hb02.upload.cloud.189.cn/v1/DCIWebUploadAction")
if err != nil {
return err
}
@@ -313,9 +339,10 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
const DEFAULT int64 = 10485760
var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
safeName := d.sanitizeName(file.GetName())
res, err := d.uploadRequest("/person/initMultiUpload", map[string]string{
"parentFolderId": dstDir.GetID(),
"fileName": encode(file.GetName()),
"fileName": encode(safeName),
"fileSize": strconv.FormatInt(file.GetSize(), 10),
"sliceSize": strconv.FormatInt(DEFAULT, 10),
"lazyCheck": "1",

View File

@@ -205,10 +205,11 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
fullUrl += "/createFolder.action"
var newFolder Cloud189Folder
safeName := y.sanitizeName(dirName)
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
req.SetQueryParams(map[string]string{
"folderName": dirName,
"folderName": safeName,
"relativePath": "",
})
if isFamily {
@@ -225,6 +226,7 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
if err != nil {
return nil, err
}
newFolder.Name = safeName
return &newFolder, nil
}
@@ -258,21 +260,29 @@ func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName strin
}
var newObj model.Obj
safeName := y.sanitizeName(newName)
switch f := srcObj.(type) {
case *Cloud189File:
fullUrl += "/renameFile.action"
queryParam["fileId"] = srcObj.GetID()
queryParam["destFileName"] = newName
queryParam["destFileName"] = safeName
newObj = &Cloud189File{Icon: f.Icon} // reuse the preview icon
case *Cloud189Folder:
fullUrl += "/renameFolder.action"
queryParam["folderId"] = srcObj.GetID()
queryParam["destFolderName"] = newName
queryParam["destFolderName"] = safeName
newObj = &Cloud189Folder{}
default:
return nil, errs.NotSupport
}
switch obj := newObj.(type) {
case *Cloud189File:
obj.Name = safeName
case *Cloud189Folder:
obj.Name = safeName
}
_, err := y.request(fullUrl, method, func(req *resty.Request) {
req.SetContext(ctx).SetQueryParams(queryParam)
}, nil, newObj, isFamily)

View File

@@ -6,9 +6,10 @@ import (
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
VCode string `json:"validate_code"`
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
VCode string `json:"validate_code"`
StripEmoji bool `json:"strip_emoji" help:"Remove four-byte characters (e.g., emoji) before upload"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"filename,filesize,lastOpTime" default:"filename"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`

View File

@@ -12,11 +12,13 @@ import (
"net/http/cookiejar"
"net/url"
"os"
"path"
"regexp"
"sort"
"strconv"
"strings"
"time"
"unicode/utf8"
"golang.org/x/sync/semaphore"
@@ -57,6 +59,29 @@ const (
CHANNEL_ID = "web_cloud.189.cn"
)
func (y *Cloud189PC) sanitizeName(name string) string {
if !y.StripEmoji {
return name
}
b := strings.Builder{}
for _, r := range name {
if utf8.RuneLen(r) == 4 {
continue
}
b.WriteRune(r)
}
sanitized := b.String()
if sanitized == "" {
ext := path.Ext(name)
if ext != "" {
sanitized = "file" + ext
} else {
sanitized = "file"
}
}
return sanitized
}
func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool) map[string]string {
dateOfGmt := getHttpDateStr()
sessionKey := y.getTokenInfo().SessionKey
@@ -475,10 +500,11 @@ func (y *Cloud189PC) refreshSession() (err error) {
func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
size := file.GetSize()
sliceSize := partSize(size)
safeName := y.sanitizeName(file.GetName())
params := Params{
"parentFolderId": dstDir.GetID(),
"fileName": url.QueryEscape(file.GetName()),
"fileName": url.QueryEscape(safeName),
"fileSize": fmt.Sprint(file.GetSize()),
"sliceSize": fmt.Sprint(sliceSize),
"lazyCheck": "1",
@@ -596,7 +622,8 @@ func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream m
return nil, errors.New("invalid hash")
}
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()), isFamily)
safeName := y.sanitizeName(stream.GetName())
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, safeName, fmt.Sprint(stream.GetSize()), isFamily)
if err != nil {
return nil, err
}
@@ -615,6 +642,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
tmpF *os.File
err error
)
safeName := y.sanitizeName(file.GetName())
size := file.GetSize()
if _, ok := cache.(io.ReaderAt); !ok && size > 0 {
tmpF, err = os.CreateTemp(conf.Conf.TempDir, "file-*")
@@ -697,7 +725,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
// step 2: pre-upload
params := Params{
"parentFolderId": dstDir.GetID(),
"fileName": url.QueryEscape(file.GetName()),
"fileName": url.QueryEscape(safeName),
"fileSize": fmt.Sprint(file.GetSize()),
"fileMd5": fileMd5Hex,
"sliceSize": fmt.Sprint(sliceSize),
@@ -833,9 +861,10 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
return nil, err
}
rateLimited := driver.NewLimitedUploadStream(ctx, io.NopCloser(tempFile))
safeName := y.sanitizeName(file.GetName())
// create the upload session
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()), isFamily)
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, safeName, fmt.Sprint(file.GetSize()), isFamily)
if err != nil {
return nil, err
}

View File

@@ -21,6 +21,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
_ "github.com/alist-org/alist/v3/drivers/bitqiu"
_ "github.com/alist-org/alist/v3/drivers/chaoxing"
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
_ "github.com/alist-org/alist/v3/drivers/cloudreve_v4"
@@ -30,6 +31,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/dropbox"
_ "github.com/alist-org/alist/v3/drivers/febbox"
_ "github.com/alist-org/alist/v3/drivers/ftp"
_ "github.com/alist-org/alist/v3/drivers/gitee"
_ "github.com/alist-org/alist/v3/drivers/github"
_ "github.com/alist-org/alist/v3/drivers/github_releases"
_ "github.com/alist-org/alist/v3/drivers/gofile"

drivers/bitqiu/driver.go Normal file
View File

@@ -0,0 +1,767 @@
package bitqiu
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http/cookiejar"
"path"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
streamPkg "github.com/alist-org/alist/v3/internal/stream"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
"github.com/google/uuid"
)
const (
baseURL = "https://pan.bitqiu.com"
loginURL = baseURL + "/loginServer/login"
userInfoURL = baseURL + "/user/getInfo"
listURL = baseURL + "/apiToken/cfi/fs/resources/pages"
uploadInitializeURL = baseURL + "/apiToken/cfi/fs/upload/v2/initialize"
uploadCompleteURL = baseURL + "/apiToken/cfi/fs/upload/v2/complete"
downloadURL = baseURL + "/download/getUrl"
createDirURL = baseURL + "/resource/create"
moveResourceURL = baseURL + "/resource/remove"
renameResourceURL = baseURL + "/resource/rename"
copyResourceURL = baseURL + "/apiToken/cfi/fs/async/copy"
copyManagerURL = baseURL + "/apiToken/cfi/fs/async/manager"
deleteResourceURL = baseURL + "/resource/delete"
successCode = "10200"
uploadSuccessCode = "30010"
copySubmittedCode = "10300"
orgChannel = "default|default|default"
)
const (
copyPollInterval = time.Second
copyPollMaxAttempts = 60
chunkSize = int64(1 << 20)
)
const defaultUserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36"
type BitQiu struct {
model.Storage
Addition
client *resty.Client
userID string
}
func (d *BitQiu) Config() driver.Config {
return config
}
func (d *BitQiu) GetAddition() driver.Additional {
return &d.Addition
}
func (d *BitQiu) Init(ctx context.Context) error {
if d.Addition.UserPlatform == "" {
d.Addition.UserPlatform = uuid.NewString()
op.MustSaveDriverStorage(d)
}
if d.client == nil {
jar, err := cookiejar.New(nil)
if err != nil {
return err
}
d.client = base.NewRestyClient()
d.client.SetBaseURL(baseURL)
d.client.SetCookieJar(jar)
}
d.client.SetHeader("user-agent", d.userAgent())
return d.login(ctx)
}
func (d *BitQiu) Drop(ctx context.Context) error {
d.client = nil
d.userID = ""
return nil
}
func (d *BitQiu) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.userID == "" {
if err := d.login(ctx); err != nil {
return nil, err
}
}
parentID := d.resolveParentID(dir)
dirPath := ""
if dir != nil {
dirPath = dir.GetPath()
}
pageSize := d.pageSize()
orderType := d.orderType()
desc := d.orderDesc()
var results []model.Obj
page := 1
for {
form := map[string]string{
"parentId": parentID,
"limit": strconv.Itoa(pageSize),
"orderType": orderType,
"desc": desc,
"model": "1",
"userId": d.userID,
"currentPage": strconv.Itoa(page),
"page": strconv.Itoa(page),
"org_channel": orgChannel,
}
var resp Response[ResourcePage]
if err := d.postForm(ctx, listURL, form, &resp); err != nil {
return nil, err
}
if resp.Code != successCode {
if resp.Code == "10401" || resp.Code == "10404" {
if err := d.login(ctx); err != nil {
return nil, err
}
continue
}
return nil, fmt.Errorf("list failed: %s", resp.Message)
}
objs, err := utils.SliceConvert(resp.Data.Data, func(item Resource) (model.Obj, error) {
return item.toObject(parentID, dirPath)
})
if err != nil {
return nil, err
}
results = append(results, objs...)
if !resp.Data.HasNext || len(resp.Data.Data) == 0 {
break
}
page++
}
return results, nil
}
func (d *BitQiu) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if file.IsDir() {
return nil, errs.NotFile
}
if d.userID == "" {
if err := d.login(ctx); err != nil {
return nil, err
}
}
form := map[string]string{
"fileIds": file.GetID(),
"org_channel": orgChannel,
}
for attempt := 0; attempt < 2; attempt++ {
var resp Response[DownloadData]
if err := d.postForm(ctx, downloadURL, form, &resp); err != nil {
return nil, err
}
switch resp.Code {
case successCode:
if resp.Data.URL == "" {
return nil, fmt.Errorf("empty download url returned")
}
return &model.Link{URL: resp.Data.URL}, nil
case "10401", "10404":
if err := d.login(ctx); err != nil {
return nil, err
}
default:
return nil, fmt.Errorf("get link failed: %s", resp.Message)
}
}
return nil, fmt.Errorf("get link failed: retry limit reached")
}
func (d *BitQiu) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
if d.userID == "" {
if err := d.login(ctx); err != nil {
return nil, err
}
}
parentID := d.resolveParentID(parentDir)
parentPath := ""
if parentDir != nil {
parentPath = parentDir.GetPath()
}
form := map[string]string{
"parentId": parentID,
"name": dirName,
"org_channel": orgChannel,
}
for attempt := 0; attempt < 2; attempt++ {
var resp Response[CreateDirData]
if err := d.postForm(ctx, createDirURL, form, &resp); err != nil {
return nil, err
}
switch resp.Code {
case successCode:
newParentID := parentID
if resp.Data.ParentID != "" {
newParentID = resp.Data.ParentID
}
name := resp.Data.Name
if name == "" {
name = dirName
}
resource := Resource{
ResourceID: resp.Data.DirID,
ResourceType: 1,
Name: name,
ParentID: newParentID,
}
obj, err := resource.toObject(newParentID, parentPath)
if err != nil {
return nil, err
}
if o, ok := obj.(*Object); ok {
o.ParentID = newParentID
}
return obj, nil
case "10401", "10404":
if err := d.login(ctx); err != nil {
return nil, err
}
default:
return nil, fmt.Errorf("create folder failed: %s", resp.Message)
}
}
return nil, fmt.Errorf("create folder failed: retry limit reached")
}
func (d *BitQiu) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
if d.userID == "" {
if err := d.login(ctx); err != nil {
return nil, err
}
}
targetParentID := d.resolveParentID(dstDir)
form := map[string]string{
"dirIds": "",
"fileIds": "",
"parentId": targetParentID,
"org_channel": orgChannel,
}
if srcObj.IsDir() {
form["dirIds"] = srcObj.GetID()
} else {
form["fileIds"] = srcObj.GetID()
}
for attempt := 0; attempt < 2; attempt++ {
var resp Response[any]
if err := d.postForm(ctx, moveResourceURL, form, &resp); err != nil {
return nil, err
}
switch resp.Code {
case successCode:
dstPath := ""
if dstDir != nil {
dstPath = dstDir.GetPath()
}
if setter, ok := srcObj.(model.SetPath); ok {
setter.SetPath(path.Join(dstPath, srcObj.GetName()))
}
if o, ok := srcObj.(*Object); ok {
o.ParentID = targetParentID
}
return srcObj, nil
case "10401", "10404":
if err := d.login(ctx); err != nil {
return nil, err
}
default:
return nil, fmt.Errorf("move failed: %s", resp.Message)
}
}
return nil, fmt.Errorf("move failed: retry limit reached")
}
func (d *BitQiu) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
if d.userID == "" {
if err := d.login(ctx); err != nil {
return nil, err
}
}
form := map[string]string{
"resourceId": srcObj.GetID(),
"name": newName,
"type": "0",
"org_channel": orgChannel,
}
if srcObj.IsDir() {
form["type"] = "1"
}
for attempt := 0; attempt < 2; attempt++ {
var resp Response[any]
if err := d.postForm(ctx, renameResourceURL, form, &resp); err != nil {
return nil, err
}
switch resp.Code {
case successCode:
return updateObjectName(srcObj, newName), nil
case "10401", "10404":
if err := d.login(ctx); err != nil {
return nil, err
}
default:
return nil, fmt.Errorf("rename failed: %s", resp.Message)
}
}
return nil, fmt.Errorf("rename failed: retry limit reached")
}
func (d *BitQiu) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
if d.userID == "" {
if err := d.login(ctx); err != nil {
return nil, err
}
}
targetParentID := d.resolveParentID(dstDir)
form := map[string]string{
"dirIds": "",
"fileIds": "",
"parentId": targetParentID,
"org_channel": orgChannel,
}
if srcObj.IsDir() {
form["dirIds"] = srcObj.GetID()
} else {
form["fileIds"] = srcObj.GetID()
}
for attempt := 0; attempt < 2; attempt++ {
var resp Response[any]
if err := d.postForm(ctx, copyResourceURL, form, &resp); err != nil {
return nil, err
}
switch resp.Code {
case successCode, copySubmittedCode:
return d.waitForCopiedObject(ctx, srcObj, dstDir)
case "10401", "10404":
if err := d.login(ctx); err != nil {
return nil, err
}
default:
return nil, fmt.Errorf("copy failed: %s", resp.Message)
}
}
return nil, fmt.Errorf("copy failed: retry limit reached")
}
func (d *BitQiu) Remove(ctx context.Context, obj model.Obj) error {
if d.userID == "" {
if err := d.login(ctx); err != nil {
return err
}
}
form := map[string]string{
"dirIds": "",
"fileIds": "",
"org_channel": orgChannel,
}
if obj.IsDir() {
form["dirIds"] = obj.GetID()
} else {
form["fileIds"] = obj.GetID()
}
for attempt := 0; attempt < 2; attempt++ {
var resp Response[any]
if err := d.postForm(ctx, deleteResourceURL, form, &resp); err != nil {
return err
}
switch resp.Code {
case successCode:
return nil
case "10401", "10404":
if err := d.login(ctx); err != nil {
return err
}
default:
return fmt.Errorf("remove failed: %s", resp.Message)
}
}
return fmt.Errorf("remove failed: retry limit reached")
}
func (d *BitQiu) Put(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
if d.userID == "" {
if err := d.login(ctx); err != nil {
return nil, err
}
}
up(0)
tmpFile, md5sum, err := streamPkg.CacheFullInTempFileAndHash(file, utils.MD5)
if err != nil {
return nil, err
}
defer tmpFile.Close()
parentID := d.resolveParentID(dstDir)
parentPath := ""
if dstDir != nil {
parentPath = dstDir.GetPath()
}
form := map[string]string{
"parentId": parentID,
"name": file.GetName(),
"size": strconv.FormatInt(file.GetSize(), 10),
"hash": md5sum,
"sampleMd5": md5sum,
"org_channel": orgChannel,
}
var resp Response[json.RawMessage]
if err = d.postForm(ctx, uploadInitializeURL, form, &resp); err != nil {
return nil, err
}
if resp.Code != uploadSuccessCode {
switch resp.Code {
case successCode:
var initData UploadInitData
if err := json.Unmarshal(resp.Data, &initData); err != nil {
return nil, fmt.Errorf("parse upload init response failed: %w", err)
}
serverCode, err := d.uploadFileInChunks(ctx, tmpFile, file.GetSize(), md5sum, initData, up)
if err != nil {
return nil, err
}
obj, err := d.completeChunkUpload(ctx, initData, parentID, parentPath, file.GetName(), file.GetSize(), md5sum, serverCode)
if err != nil {
return nil, err
}
up(100)
return obj, nil
default:
return nil, fmt.Errorf("upload failed: %s", resp.Message)
}
}
var resource Resource
if err := json.Unmarshal(resp.Data, &resource); err != nil {
return nil, fmt.Errorf("parse upload response failed: %w", err)
}
obj, err := resource.toObject(parentID, parentPath)
if err != nil {
return nil, err
}
up(100)
return obj, nil
}
func (d *BitQiu) uploadFileInChunks(ctx context.Context, tmpFile model.File, size int64, md5sum string, initData UploadInitData, up driver.UpdateProgress) (string, error) {
if d.client == nil {
return "", fmt.Errorf("client not initialized")
}
if size <= 0 {
return "", fmt.Errorf("invalid file size")
}
buf := make([]byte, chunkSize)
offset := int64(0)
var finishedFlag string
for offset < size {
chunkLen := chunkSize
remaining := size - offset
if remaining < chunkLen {
chunkLen = remaining
}
reader := io.NewSectionReader(tmpFile, offset, chunkLen)
chunkBuf := buf[:chunkLen]
if _, err := io.ReadFull(reader, chunkBuf); err != nil {
return "", fmt.Errorf("read chunk failed: %w", err)
}
headers := map[string]string{
"accept": "*/*",
"content-type": "application/octet-stream",
"appid": initData.AppID,
"token": initData.Token,
"userid": strconv.FormatInt(initData.UserID, 10),
"serialnumber": initData.SerialNumber,
"hash": md5sum,
"len": strconv.FormatInt(chunkLen, 10),
"offset": strconv.FormatInt(offset, 10),
"user-agent": d.userAgent(),
}
var chunkResp ChunkUploadResponse
req := d.client.R().
SetContext(ctx).
SetHeaders(headers).
SetBody(chunkBuf).
SetResult(&chunkResp)
if _, err := req.Post(initData.UploadURL); err != nil {
return "", err
}
if chunkResp.ErrCode != 0 {
return "", fmt.Errorf("chunk upload failed with code %d", chunkResp.ErrCode)
}
finishedFlag = chunkResp.FinishedFlag
offset += chunkLen
up(float64(offset) * 100 / float64(size))
}
if finishedFlag == "" {
return "", fmt.Errorf("upload finished without server code")
}
return finishedFlag, nil
}
func (d *BitQiu) completeChunkUpload(ctx context.Context, initData UploadInitData, parentID, parentPath, name string, size int64, md5sum, serverCode string) (model.Obj, error) {
form := map[string]string{
"currentPage": "1",
"limit": "1",
"userId": strconv.FormatInt(initData.UserID, 10),
"status": "0",
"parentId": parentID,
"name": name,
"fileUid": initData.FileUID,
"fileSid": initData.FileSID,
"size": strconv.FormatInt(size, 10),
"serverCode": serverCode,
"snapTime": "",
"hash": md5sum,
"sampleMd5": md5sum,
"org_channel": orgChannel,
}
var resp Response[Resource]
if err := d.postForm(ctx, uploadCompleteURL, form, &resp); err != nil {
return nil, err
}
if resp.Code != successCode {
return nil, fmt.Errorf("complete upload failed: %s", resp.Message)
}
return resp.Data.toObject(parentID, parentPath)
}
func (d *BitQiu) login(ctx context.Context) error {
if d.client == nil {
return fmt.Errorf("client not initialized")
}
form := map[string]string{
"passport": d.Username,
"password": utils.GetMD5EncodeStr(d.Password),
"remember": "0",
"captcha": "",
"org_channel": orgChannel,
}
var resp Response[LoginData]
if err := d.postForm(ctx, loginURL, form, &resp); err != nil {
return err
}
if resp.Code != successCode {
return fmt.Errorf("login failed: %s", resp.Message)
}
d.userID = strconv.FormatInt(resp.Data.UserID, 10)
return d.ensureRootFolderID(ctx)
}
func (d *BitQiu) ensureRootFolderID(ctx context.Context) error {
rootID := d.Addition.GetRootId()
if rootID != "" && rootID != "0" {
return nil
}
form := map[string]string{
"org_channel": orgChannel,
}
var resp Response[UserInfoData]
if err := d.postForm(ctx, userInfoURL, form, &resp); err != nil {
return err
}
if resp.Code != successCode {
return fmt.Errorf("get user info failed: %s", resp.Message)
}
if resp.Data.RootDirID == "" {
return fmt.Errorf("get user info failed: empty root dir id")
}
if d.Addition.RootFolderID != resp.Data.RootDirID {
d.Addition.RootFolderID = resp.Data.RootDirID
op.MustSaveDriverStorage(d)
}
return nil
}
func (d *BitQiu) postForm(ctx context.Context, url string, form map[string]string, result interface{}) error {
if d.client == nil {
return fmt.Errorf("client not initialized")
}
req := d.client.R().
SetContext(ctx).
SetHeaders(d.commonHeaders()).
SetFormData(form)
if result != nil {
req = req.SetResult(result)
}
_, err := req.Post(url)
return err
}
func (d *BitQiu) waitForCopiedObject(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
expectedName := srcObj.GetName()
expectedIsDir := srcObj.IsDir()
var lastListErr error
for attempt := 0; attempt < copyPollMaxAttempts; attempt++ {
if attempt > 0 {
if err := waitWithContext(ctx, copyPollInterval); err != nil {
return nil, err
}
}
if err := d.checkCopyFailure(ctx); err != nil {
return nil, err
}
obj, err := d.findObjectInDir(ctx, dstDir, expectedName, expectedIsDir)
if err != nil {
lastListErr = err
continue
}
if obj != nil {
return obj, nil
}
}
if lastListErr != nil {
return nil, lastListErr
}
return nil, fmt.Errorf("copy task timed out waiting for completion")
}
func (d *BitQiu) checkCopyFailure(ctx context.Context) error {
form := map[string]string{
"org_channel": orgChannel,
}
for attempt := 0; attempt < 2; attempt++ {
var resp Response[AsyncManagerData]
if err := d.postForm(ctx, copyManagerURL, form, &resp); err != nil {
return err
}
switch resp.Code {
case successCode:
if len(resp.Data.FailTasks) > 0 {
return fmt.Errorf("copy failed: %s", resp.Data.FailTasks[0].ErrorMessage())
}
return nil
case "10401", "10404":
if err := d.login(ctx); err != nil {
return err
}
default:
return fmt.Errorf("query copy status failed: %s", resp.Message)
}
}
return fmt.Errorf("query copy status failed: retry limit reached")
}
func (d *BitQiu) findObjectInDir(ctx context.Context, dir model.Obj, name string, isDir bool) (model.Obj, error) {
objs, err := d.List(ctx, dir, model.ListArgs{})
if err != nil {
return nil, err
}
for _, obj := range objs {
if obj.GetName() == name && obj.IsDir() == isDir {
return obj, nil
}
}
return nil, nil
}
func waitWithContext(ctx context.Context, d time.Duration) error {
timer := time.NewTimer(d)
defer timer.Stop()
select {
case <-ctx.Done():
return ctx.Err()
case <-timer.C:
return nil
}
}
func (d *BitQiu) commonHeaders() map[string]string {
headers := map[string]string{
"accept": "application/json, text/plain, */*",
"accept-language": "en-US,en;q=0.9",
"cache-control": "no-cache",
"pragma": "no-cache",
"user-platform": d.Addition.UserPlatform,
"x-kl-saas-ajax-request": "Ajax_Request",
"x-requested-with": "XMLHttpRequest",
"referer": baseURL + "/",
"origin": baseURL,
"user-agent": d.userAgent(),
}
return headers
}
func (d *BitQiu) userAgent() string {
if ua := strings.TrimSpace(d.Addition.UserAgent); ua != "" {
return ua
}
return defaultUserAgent
}
func (d *BitQiu) resolveParentID(dir model.Obj) string {
if dir != nil && dir.GetID() != "" {
return dir.GetID()
}
if root := d.Addition.GetRootId(); root != "" {
return root
}
return config.DefaultRoot
}
func (d *BitQiu) pageSize() int {
if size, err := strconv.Atoi(d.Addition.PageSize); err == nil && size > 0 {
return size
}
return 24
}
func (d *BitQiu) orderType() string {
if d.Addition.OrderType != "" {
return d.Addition.OrderType
}
return "updateTime"
}
func (d *BitQiu) orderDesc() string {
if d.Addition.OrderDesc {
return "1"
}
return "0"
}
var _ driver.Driver = (*BitQiu)(nil)
var _ driver.PutResult = (*BitQiu)(nil)
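
To illustrate the chunk arithmetic that `uploadFileInChunks` walks through above (1 MiB chunks, sequential offsets, a shorter final chunk), here is a small standalone sketch with no network calls; the 2.5 MiB size is just an example.

```go
package main

import "fmt"

const chunkSize = int64(1 << 20) // 1 MiB, matching the driver constant above

// chunkPlan returns the (offset, length) pairs the sequential upload loop
// in uploadFileInChunks would walk through for a file of the given size.
func chunkPlan(size int64) [][2]int64 {
	var plan [][2]int64
	for offset := int64(0); offset < size; {
		length := chunkSize
		if remaining := size - offset; remaining < length {
			length = remaining
		}
		plan = append(plan, [2]int64{offset, length})
		offset += length
	}
	return plan
}

func main() {
	// A 2.5 MiB file yields two full 1 MiB chunks and one 0.5 MiB tail chunk.
	for _, c := range chunkPlan(5 << 19) {
		fmt.Printf("offset=%d len=%d\n", c[0], c[1])
	}
}
```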

drivers/bitqiu/meta.go Normal file
View File

@@ -0,0 +1,28 @@
package bitqiu
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
UserPlatform string `json:"user_platform" help:"Optional device identifier; auto-generated if empty."`
OrderType string `json:"order_type" type:"select" options:"updateTime,createTime,name,size" default:"updateTime"`
OrderDesc bool `json:"order_desc"`
PageSize string `json:"page_size" default:"24" help:"Number of entries to request per page."`
UserAgent string `json:"user_agent" default:"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36"`
}
var config = driver.Config{
Name: "BitQiu",
DefaultRoot: "0",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &BitQiu{}
})
}

drivers/bitqiu/types.go Normal file
View File

@@ -0,0 +1,107 @@
package bitqiu
import "encoding/json"
type Response[T any] struct {
Code string `json:"code"`
Message string `json:"message"`
Data T `json:"data"`
}
type LoginData struct {
UserID int64 `json:"userId"`
}
type ResourcePage struct {
CurrentPage int `json:"currentPage"`
PageSize int `json:"pageSize"`
TotalCount int `json:"totalCount"`
TotalPageCount int `json:"totalPageCount"`
Data []Resource `json:"data"`
HasNext bool `json:"hasNext"`
}
type Resource struct {
ResourceID string `json:"resourceId"`
ResourceUID string `json:"resourceUid"`
ResourceType int `json:"resourceType"`
ParentID string `json:"parentId"`
Name string `json:"name"`
ExtName string `json:"extName"`
Size *json.Number `json:"size"`
CreateTime *string `json:"createTime"`
UpdateTime *string `json:"updateTime"`
FileMD5 string `json:"fileMd5"`
}
type DownloadData struct {
URL string `json:"url"`
MD5 string `json:"md5"`
Size int64 `json:"size"`
}
type UserInfoData struct {
RootDirID string `json:"rootDirId"`
}
type CreateDirData struct {
DirID string `json:"dirId"`
Name string `json:"name"`
ParentID string `json:"parentId"`
}
type AsyncManagerData struct {
WaitTasks []AsyncTask `json:"waitTaskList"`
RunningTasks []AsyncTask `json:"runningTaskList"`
SuccessTasks []AsyncTask `json:"successTaskList"`
FailTasks []AsyncTask `json:"failTaskList"`
TaskList []AsyncTask `json:"taskList"`
}
type AsyncTask struct {
TaskID string `json:"taskId"`
Status int `json:"status"`
ErrorMsg string `json:"errorMsg"`
Message string `json:"message"`
Result *AsyncTaskInfo `json:"result"`
TargetName string `json:"targetName"`
TargetDirID string `json:"parentId"`
}
type AsyncTaskInfo struct {
Resource Resource `json:"resource"`
DirID string `json:"dirId"`
FileID string `json:"fileId"`
Name string `json:"name"`
ParentID string `json:"parentId"`
}
func (t AsyncTask) ErrorMessage() string {
if t.ErrorMsg != "" {
return t.ErrorMsg
}
if t.Message != "" {
return t.Message
}
return "unknown error"
}
type UploadInitData struct {
Name string `json:"name"`
Size int64 `json:"size"`
Token string `json:"token"`
FileUID string `json:"fileUid"`
FileSID string `json:"fileSid"`
ParentID string `json:"parentId"`
UserID int64 `json:"userId"`
SerialNumber string `json:"serialNumber"`
UploadURL string `json:"uploadUrl"`
AppID string `json:"appId"`
}
type ChunkUploadResponse struct {
ErrCode int `json:"errCode"`
Offset int64 `json:"offset"`
Finished int `json:"finished"`
FinishedFlag string `json:"finishedFlag"`
}
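
A quick hedged sketch of how the generic `Response[T]` envelope above can be decoded; the sample JSON is invented for illustration, not a captured API response.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Response mirrors the generic envelope defined above.
type Response[T any] struct {
	Code    string `json:"code"`
	Message string `json:"message"`
	Data    T      `json:"data"`
}

type LoginData struct {
	UserID int64 `json:"userId"`
}

func main() {
	// Hypothetical payload shaped like the envelope; values are made up.
	raw := []byte(`{"code":"10200","message":"ok","data":{"userId":12345}}`)

	var resp Response[LoginData]
	if err := json.Unmarshal(raw, &resp); err != nil {
		panic(err)
	}
	fmt.Println(resp.Code, resp.Data.UserID) // 10200 12345
}
```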

drivers/bitqiu/util.go Normal file
View File

@@ -0,0 +1,102 @@
package bitqiu
import (
"path"
"strings"
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
)
type Object struct {
model.Object
ParentID string
}
func (r Resource) toObject(parentID, parentPath string) (model.Obj, error) {
id := r.ResourceID
if id == "" {
id = r.ResourceUID
}
obj := &Object{
Object: model.Object{
ID: id,
Name: r.Name,
IsFolder: r.ResourceType == 1,
},
ParentID: parentID,
}
if r.Size != nil {
if size, err := (*r.Size).Int64(); err == nil {
obj.Size = size
}
}
if ct := parseBitQiuTime(r.CreateTime); !ct.IsZero() {
obj.Ctime = ct
}
if mt := parseBitQiuTime(r.UpdateTime); !mt.IsZero() {
obj.Modified = mt
}
if r.FileMD5 != "" {
obj.HashInfo = utils.NewHashInfo(utils.MD5, strings.ToLower(r.FileMD5))
}
obj.SetPath(path.Join(parentPath, obj.Name))
return obj, nil
}
func parseBitQiuTime(value *string) time.Time {
if value == nil {
return time.Time{}
}
trimmed := strings.TrimSpace(*value)
if trimmed == "" {
return time.Time{}
}
if ts, err := time.ParseInLocation("2006-01-02 15:04:05", trimmed, time.Local); err == nil {
return ts
}
return time.Time{}
}
func updateObjectName(obj model.Obj, newName string) model.Obj {
newPath := path.Join(parentPathOf(obj.GetPath()), newName)
switch o := obj.(type) {
case *Object:
o.Name = newName
o.Object.Name = newName
o.SetPath(newPath)
return o
case *model.Object:
o.Name = newName
o.SetPath(newPath)
return o
}
if setter, ok := obj.(model.SetPath); ok {
setter.SetPath(newPath)
}
return &model.Object{
ID: obj.GetID(),
Path: newPath,
Name: newName,
Size: obj.GetSize(),
Modified: obj.ModTime(),
Ctime: obj.CreateTime(),
IsFolder: obj.IsDir(),
HashInfo: obj.GetHash(),
}
}
func parentPathOf(p string) string {
if p == "" {
return ""
}
dir := path.Dir(p)
if dir == "." {
return ""
}
return dir
}
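
A tiny standalone check of the path handling `updateObjectName` relies on (illustrative only): `parentPathOf` strips the last path element, so a renamed object keeps its parent directory.

```go
package main

import (
	"fmt"
	"path"
)

// parentPathOf mirrors the helper above: the parent directory of p,
// or "" for empty or top-level paths.
func parentPathOf(p string) string {
	if p == "" {
		return ""
	}
	dir := path.Dir(p)
	if dir == "." {
		return ""
	}
	return dir
}

func main() {
	oldPath := "/docs/report.txt"
	newName := "summary.txt"
	fmt.Println(parentPathOf(oldPath))                     // "/docs"
	fmt.Println(path.Join(parentPathOf(oldPath), newName)) // "/docs/summary.txt"
}
```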

drivers/gitee/driver.go Normal file
View File

@@ -0,0 +1,224 @@
package gitee
import (
"context"
"errors"
"fmt"
"net/http"
"net/url"
stdpath "path"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
type Gitee struct {
model.Storage
Addition
client *resty.Client
}
func (d *Gitee) Config() driver.Config {
return config
}
func (d *Gitee) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Gitee) Init(ctx context.Context) error {
d.RootFolderPath = utils.FixAndCleanPath(d.RootFolderPath)
d.Endpoint = strings.TrimSpace(d.Endpoint)
if d.Endpoint == "" {
d.Endpoint = "https://gitee.com/api/v5"
}
d.Endpoint = strings.TrimSuffix(d.Endpoint, "/")
d.Owner = strings.TrimSpace(d.Owner)
d.Repo = strings.TrimSpace(d.Repo)
d.Token = strings.TrimSpace(d.Token)
d.DownloadProxy = strings.TrimSpace(d.DownloadProxy)
if d.Owner == "" || d.Repo == "" {
return errors.New("owner and repo are required")
}
d.client = base.NewRestyClient().
SetBaseURL(d.Endpoint).
SetHeader("Accept", "application/json")
repo, err := d.getRepo()
if err != nil {
return err
}
d.Ref = strings.TrimSpace(d.Ref)
if d.Ref == "" {
d.Ref = repo.DefaultBranch
}
return nil
}
func (d *Gitee) Drop(ctx context.Context) error {
return nil
}
func (d *Gitee) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
relPath := d.relativePath(dir.GetPath())
contents, err := d.listContents(relPath)
if err != nil {
return nil, err
}
objs := make([]model.Obj, 0, len(contents))
for i := range contents {
objs = append(objs, contents[i].toModelObj())
}
return objs, nil
}
func (d *Gitee) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var downloadURL string
if obj, ok := file.(*Object); ok {
downloadURL = obj.DownloadURL
if downloadURL == "" {
relPath := d.relativePath(file.GetPath())
content, err := d.getContent(relPath)
if err != nil {
return nil, err
}
if content.DownloadURL == "" {
return nil, errors.New("empty download url")
}
obj.DownloadURL = content.DownloadURL
downloadURL = content.DownloadURL
}
} else {
relPath := d.relativePath(file.GetPath())
content, err := d.getContent(relPath)
if err != nil {
return nil, err
}
if content.DownloadURL == "" {
return nil, errors.New("empty download url")
}
downloadURL = content.DownloadURL
}
url := d.applyProxy(downloadURL)
return &model.Link{
URL: url,
Header: http.Header{
"Cookie": {d.Cookie},
},
}, nil
}
func (d *Gitee) newRequest() *resty.Request {
req := d.client.R()
if d.Token != "" {
req.SetQueryParam("access_token", d.Token)
}
if d.Ref != "" {
req.SetQueryParam("ref", d.Ref)
}
return req
}
func (d *Gitee) apiPath(path string) string {
escapedOwner := url.PathEscape(d.Owner)
escapedRepo := url.PathEscape(d.Repo)
if path == "" {
return fmt.Sprintf("/repos/%s/%s/contents", escapedOwner, escapedRepo)
}
return fmt.Sprintf("/repos/%s/%s/contents/%s", escapedOwner, escapedRepo, encodePath(path))
}
func (d *Gitee) listContents(path string) ([]Content, error) {
res, err := d.newRequest().Get(d.apiPath(path))
if err != nil {
return nil, err
}
if res.IsError() {
return nil, toErr(res)
}
var contents []Content
if err := utils.Json.Unmarshal(res.Body(), &contents); err != nil {
var single Content
if err2 := utils.Json.Unmarshal(res.Body(), &single); err2 == nil && single.Type != "" {
if single.Type != "dir" {
return nil, errs.NotFolder
}
return []Content{}, nil
}
return nil, err
}
for i := range contents {
contents[i].Path = joinPath(path, contents[i].Name)
}
return contents, nil
}
func (d *Gitee) getContent(path string) (*Content, error) {
res, err := d.newRequest().Get(d.apiPath(path))
if err != nil {
return nil, err
}
if res.IsError() {
return nil, toErr(res)
}
var content Content
if err := utils.Json.Unmarshal(res.Body(), &content); err != nil {
return nil, err
}
if content.Type == "" {
return nil, errors.New("invalid response")
}
if content.Path == "" {
content.Path = path
}
return &content, nil
}
func (d *Gitee) relativePath(full string) string {
full = utils.FixAndCleanPath(full)
root := utils.FixAndCleanPath(d.RootFolderPath)
if root == "/" {
return strings.TrimPrefix(full, "/")
}
if utils.PathEqual(full, root) {
return ""
}
prefix := utils.PathAddSeparatorSuffix(root)
if strings.HasPrefix(full, prefix) {
return strings.TrimPrefix(full, prefix)
}
return strings.TrimPrefix(full, "/")
}
func (d *Gitee) applyProxy(raw string) string {
if raw == "" || d.DownloadProxy == "" {
return raw
}
proxy := d.DownloadProxy
if !strings.HasSuffix(proxy, "/") {
proxy += "/"
}
return proxy + strings.TrimLeft(raw, "/")
}
func encodePath(p string) string {
if p == "" {
return ""
}
parts := strings.Split(p, "/")
for i, part := range parts {
parts[i] = url.PathEscape(part)
}
return strings.Join(parts, "/")
}
func joinPath(base, name string) string {
if base == "" {
return name
}
return strings.TrimPrefix(stdpath.Join(base, name), "./")
}
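
For reference, a minimal sketch of how the pure helpers above compose an API path and a proxied download URL; the owner, repo, path, and mirror values are made up for illustration.

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// encodePath and applyProxy mirror the helpers above, with the proxy
// passed in explicitly instead of read from driver settings.
func encodePath(p string) string {
	if p == "" {
		return ""
	}
	parts := strings.Split(p, "/")
	for i, part := range parts {
		parts[i] = url.PathEscape(part)
	}
	return strings.Join(parts, "/")
}

func applyProxy(raw, proxy string) string {
	if raw == "" || proxy == "" {
		return raw
	}
	if !strings.HasSuffix(proxy, "/") {
		proxy += "/"
	}
	return proxy + strings.TrimLeft(raw, "/")
}

func main() {
	// Hypothetical repo layout: owner "alice", repo "notes", file "docs/读我.md".
	apiPath := fmt.Sprintf("/repos/%s/%s/contents/%s",
		url.PathEscape("alice"), url.PathEscape("notes"), encodePath("docs/读我.md"))
	fmt.Println(apiPath)

	// Prefixing a raw download URL with a mirror, as the driver's applyProxy does.
	fmt.Println(applyProxy("https://gitee.com/alice/notes/raw/master/docs/a.md",
		"https://mirror.example.com"))
}
```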

drivers/gitee/meta.go Normal file
View File

@@ -0,0 +1,29 @@
package gitee
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootPath
Endpoint string `json:"endpoint" type:"string" help:"Gitee API endpoint, default https://gitee.com/api/v5"`
Token string `json:"token" type:"string"`
Owner string `json:"owner" type:"string" required:"true"`
Repo string `json:"repo" type:"string" required:"true"`
Ref string `json:"ref" type:"string" help:"Branch, tag or commit SHA, defaults to repository default branch"`
DownloadProxy string `json:"download_proxy" type:"string" help:"Prefix added before download URLs, e.g. https://mirror.example.com/"`
Cookie string `json:"cookie" type:"string" help:"Cookie returned from user info request"`
}
var config = driver.Config{
Name: "Gitee",
LocalSort: true,
DefaultRoot: "/",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Gitee{}
})
}

drivers/gitee/types.go Normal file
View File

@@ -0,0 +1,60 @@
package gitee
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type Links struct {
Self string `json:"self"`
Html string `json:"html"`
}
type Content struct {
Type string `json:"type"`
Size *int64 `json:"size"`
Name string `json:"name"`
Path string `json:"path"`
Sha string `json:"sha"`
URL string `json:"url"`
HtmlURL string `json:"html_url"`
DownloadURL string `json:"download_url"`
Links Links `json:"_links"`
}
func (c Content) toModelObj() model.Obj {
size := int64(0)
if c.Size != nil {
size = *c.Size
}
return &Object{
Object: model.Object{
ID: c.Path,
Name: c.Name,
Size: size,
Modified: time.Unix(0, 0),
IsFolder: c.Type == "dir",
},
DownloadURL: c.DownloadURL,
HtmlURL: c.HtmlURL,
}
}
type Object struct {
model.Object
DownloadURL string
HtmlURL string
}
func (o *Object) URL() string {
return o.DownloadURL
}
type Repo struct {
DefaultBranch string `json:"default_branch"`
}
type ErrResp struct {
Message string `json:"message"`
}

drivers/gitee/util.go Normal file
View File

@@ -0,0 +1,44 @@
package gitee
import (
"fmt"
"net/url"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
func (d *Gitee) getRepo() (*Repo, error) {
req := d.client.R()
if d.Token != "" {
req.SetQueryParam("access_token", d.Token)
}
if d.Cookie != "" {
req.SetHeader("Cookie", d.Cookie)
}
escapedOwner := url.PathEscape(d.Owner)
escapedRepo := url.PathEscape(d.Repo)
res, err := req.Get(fmt.Sprintf("/repos/%s/%s", escapedOwner, escapedRepo))
if err != nil {
return nil, err
}
if res.IsError() {
return nil, toErr(res)
}
var repo Repo
if err := utils.Json.Unmarshal(res.Body(), &repo); err != nil {
return nil, err
}
if repo.DefaultBranch == "" {
return nil, fmt.Errorf("failed to fetch default branch")
}
return &repo, nil
}
func toErr(res *resty.Response) error {
var errMsg ErrResp
if err := utils.Json.Unmarshal(res.Body(), &errMsg); err == nil && errMsg.Message != "" {
return fmt.Errorf("%s: %s", res.Status(), errMsg.Message)
}
return fmt.Errorf(res.Status())
}

View File

@@ -430,17 +430,35 @@ func (d *LanZou) getFilesByShareUrl(shareID, pwd string, sharePageData string) (
file.Time = timeFindReg.FindString(sharePageData)
// follow the redirect to get the real download link
res, err := base.NoRedirectClient.R().SetHeaders(map[string]string{
headers := map[string]string{
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
}).Get(downloadUrl)
}
res, err := base.NoRedirectClient.R().SetHeaders(headers).Get(downloadUrl)
if err != nil {
return nil, err
}
rPageData := res.String()
if findAcwScV2Reg.MatchString(rPageData) {
log.Debug("lanzou: detected acw_sc__v2 challenge, recalculating cookie")
acwScV2, err := CalcAcwScV2(rPageData)
if err != nil {
return nil, err
}
// retry with calculated cookie to bypass anti-crawler validation
res, err = base.NoRedirectClient.R().
SetHeaders(headers).
SetCookie(&http.Cookie{Name: "acw_sc__v2", Value: acwScV2}).
Get(downloadUrl)
if err != nil {
return nil, err
}
rPageData = res.String()
}
file.Url = res.Header().Get("location")
// verification was triggered
rPageData := res.String()
if res.StatusCode() != 302 {
param, err = htmlJsonToMap(rPageData)
if err != nil {

View File

@@ -9,8 +9,9 @@ type Addition struct {
AccessToken string `json:"access_token" required:"true"`
ProjectID string `json:"project_id"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"updated_at,title,size" default:"title"`
OrderDesc bool `json:"order_desc"`
OrderBy string `json:"order_by" type:"select" options:"updated_at,title,size" default:"title"`
OrderDesc bool `json:"order_desc"`
DeviceFingerprint string `json:"device_fingerprint" required:"true"`
}
var config = driver.Config{

View File

@@ -17,6 +17,9 @@ import (
func (d *MediaTrack) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if d.DeviceFingerprint != "" {
req.SetHeader("X-Device-Fingerprint", d.DeviceFingerprint)
}
if callback != nil {
callback(req)
}