Compare commits

...

5 Commits

Author SHA1 Message Date
千石
e1800f18e4 feat: Check usage before deleting storage (#9322)
* feat(storage): Added role and user path checking functionality

- Added `GetAllRoles` function to retrieve all roles
- Added `GetAllUsers` function to retrieve all users
- Added `firstPathSegment` function to extract the first segment of a path
- Checks whether a storage is still referenced by any role or user before deletion, and reports which ones so those references can be removed first (see the sketch after this commit)

* fix(storage): Fixed a potential issue caused by not checking whether `firstMount` is empty.

- Added a check that `firstMount` is non-empty to prevent logic errors.
- Adjusted the loading of `GetAllRoles` and `GetAllUsers` so they only run when `firstMount` is non-empty.
- Fixed the `usedBy` check logic to ensure that an error message is returned under the correct conditions.
- Optimized code structure to reduce unnecessary execution paths.
2025-09-12 17:56:23 +08:00
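For orientation, here is a minimal, self-contained sketch of the pre-delete check this commit describes. The types Role, User, and PermissionEntry below are simplified stand-ins, not the project's actual models; the real implementation is in the internal/op and internal/db diffs further down this page.

package main

import (
	"fmt"
	"strings"
)

// Simplified stand-ins for the project's models.
type PermissionEntry struct{ Path string }

type Role struct {
	Name             string
	PermissionScopes []PermissionEntry
}

type User struct{ Username, BasePath string }

// firstPathSegment extracts the first segment of a path, e.g. "/team/docs" -> "team".
func firstPathSegment(p string) string {
	p = strings.TrimPrefix(strings.TrimSpace(p), "/")
	if p == "" {
		return ""
	}
	if i := strings.Index(p, "/"); i >= 0 {
		return p[:i]
	}
	return p
}

// usedBy collects the roles and users whose paths live under the storage's mount segment.
func usedBy(mountPath string, roles []Role, users []User) []string {
	first := firstPathSegment(mountPath)
	if first == "" {
		return nil
	}
	var out []string
	for _, r := range roles {
		for _, e := range r.PermissionScopes {
			if firstPathSegment(e.Path) == first {
				out = append(out, "role:"+r.Name)
				break
			}
		}
	}
	for _, u := range users {
		if firstPathSegment(u.BasePath) == first {
			out = append(out, "user:"+u.Username)
		}
	}
	return out
}

func main() {
	roles := []Role{{Name: "editor", PermissionScopes: []PermissionEntry{{Path: "/team/docs"}}}}
	users := []User{{Username: "alice", BasePath: "/team"}}
	if used := usedBy("/team", roles, users); len(used) > 0 {
		fmt.Printf("storage is used by %s, please cancel usage first\n", strings.Join(used, ", "))
	}
}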
D@' 3z K!7
16cce37947 fix(drivers): add session renewal cron for MediaFire driver (#9321)
- Implement automatic session token renewal every 6-9 minutes
- Add validation for required SessionToken and Cookie fields in Init
- Handle session expiration by calling renewToken on validation failure
- Prevent storage failures due to MediaFire session timeouts

Fixes session closure issues that occur after server restarts or extended idle periods (see the sketch after this commit).

Co-authored-by: Da3zKi7 <da3zki7@duck.com>
2025-09-12 17:53:47 +08:00
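A minimal, self-contained sketch of the renewal schedule this commit describes: renew on a random interval between 6 and 9 minutes. The standard library's time.Ticker stands in here for the project's pkg/cron helper, which is what the actual driver uses (see drivers/mediafire/driver.go further down); renewToken is a placeholder for the driver's real API call.

package main

import (
	"context"
	"fmt"
	"math/rand"
	"time"
)

// renewToken stands in for the driver's real session renewal call.
func renewToken(ctx context.Context) error {
	fmt.Println("renewing MediaFire session token")
	return nil
}

// startRenewal renews the session token every 6-9 minutes until ctx is cancelled.
func startRenewal(ctx context.Context) {
	interval := time.Duration(rand.Intn(4)+6) * time.Minute // 6, 7, 8 or 9 minutes
	ticker := time.NewTicker(interval)
	go func() {
		defer ticker.Stop()
		for {
			select {
			case <-ctx.Done():
				return
			case <-ticker.C:
				_ = renewToken(ctx)
			}
		}
	}()
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	startRenewal(ctx)
	time.Sleep(time.Second) // demo only; the real driver keeps the schedule alive for the storage's lifetime
}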
千石
6e7c7d1dd0 refactor(auth): Optimize permission path processing logic (#9320)
- Changed permission path collection from map to slice to improve code readability
- Removed redundant path checks to improve path addition efficiency
- Restructured the path-processing loop to simplify how permissions are assigned per path (see the sketch after this commit)
2025-09-11 21:16:33 +08:00
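In effect the refactor keeps a map for merging permission bits but collects the cleaned paths in a slice so their first-seen order is preserved. A minimal standalone sketch, with PermissionEntry as an illustrative stand-in for the real model:

package main

import (
	"fmt"
	"path"
	"strings"
)

// PermissionEntry is an illustrative stand-in for the project's model type.
type PermissionEntry struct {
	Path       string
	Permission int32
}

// mergeScopes merges permission bits per cleaned path while keeping first-seen order.
func mergeScopes(scopes []PermissionEntry) []PermissionEntry {
	permMap := map[string]int32{}
	paths := make([]string, 0, len(scopes))
	for _, e := range scopes {
		cleanPath := path.Clean("/" + strings.TrimPrefix(e.Path, "/"))
		if _, ok := permMap[cleanPath]; !ok {
			paths = append(paths, cleanPath) // remember order on first sight
		}
		permMap[cleanPath] |= e.Permission
	}
	out := make([]PermissionEntry, 0, len(paths))
	for _, p := range paths {
		out = append(out, PermissionEntry{Path: p, Permission: permMap[p]})
	}
	return out
}

func main() {
	merged := mergeScopes([]PermissionEntry{
		{Path: "/docs", Permission: 1},
		{Path: "docs/", Permission: 2},
		{Path: "/media", Permission: 4},
	})
	fmt.Println(merged) // [{/docs 3} {/media 4}]
}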
Chesyre
28a8428559 feat(driver): add Gofile storage driver (#9318)
Add support for Gofile.io cloud storage service with full CRUD operations.
Features:
- File and folder listing
- Upload and download functionality
- Create, move, rename, copy, and delete operations
- Direct link generation for file access
- API token authentication (see the request sketch after this commit)
The driver implements all required driver interfaces and follows
the existing driver patterns in the codebase.
2025-09-11 11:46:31 +08:00
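A small, self-contained sketch of the API token authentication the commit lists: the driver sends the token as a Bearer header on every request. The /accounts/getid endpoint and response shape mirror what the new drivers/gofile/util.go below uses; treat those details as belonging to that code rather than as independent Gofile documentation, and GOFILE_API_TOKEN is just an illustrative environment variable.

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"time"
)

// getAccountID asks the Gofile API which account the token belongs to,
// mirroring the driver's getAccountId helper shown in drivers/gofile/util.go below.
func getAccountID(ctx context.Context, token string) (string, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://api.gofile.io/accounts/getid", nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("Authorization", "Bearer "+token) // API token authentication
	client := &http.Client{Timeout: 15 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("gofile API error: HTTP %d", resp.StatusCode)
	}
	var out struct {
		Status string `json:"status"`
		Data   struct {
			ID string `json:"id"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return "", err
	}
	return out.Data.ID, nil
}

func main() {
	id, err := getAccountID(context.Background(), os.Getenv("GOFILE_API_TOKEN"))
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("account id:", id)
}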
D@' 3z K!7
d0026030cb feat(drivers): add MediaFire driver support (#9319)
- Implement complete MediaFire storage driver
- Add authentication via session_token and cookie
- Support all core operations: List, Get, Link, Put, Copy, Move, Remove, Rename, MakeDir
- Include thumbnail generation for media files
- Handle MediaFire's resumable upload API with multi-unit transfers (see the sketch after this commit)
- Add proper error handling and progress reporting

Closes "请求支持Mediafire" (request to support MediaFire) #7869

Co-authored-by: Da3zKi7 <da3zki7@duck.com>
2025-09-11 11:46:09 +08:00
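A compact, self-contained sketch of the multi-unit resumable upload flow the commit mentions: the file is cut into fixed-size units, units the server already holds (tracked in a bitmap with 16 bits per word) are skipped, and the rest are sent one by one. The helpers below are simplified stand-ins; the full logic is in drivers/mediafire/util.go further down.

package main

import (
	"fmt"
	"io"
	"os"
)

// isUnitUploaded checks the resumable-upload bitmap: each word carries 16 bits,
// one per upload unit, mirroring the helper in drivers/mediafire/util.go below.
func isUnitUploaded(words []int, unitID int) bool {
	wordIndex, bitIndex := unitID/16, unitID%16
	if wordIndex >= len(words) {
		return false
	}
	return (words[wordIndex]>>bitIndex)&1 == 1
}

// uploadUnits walks the file unit by unit, skipping units the server already has.
// sendUnit stands in for the real resumable-upload HTTP call.
func uploadUnits(f *os.File, unitSize int64, numUnits int, bitmap []int,
	sendUnit func(unitID int, data []byte) error) error {
	stat, err := f.Stat()
	if err != nil {
		return err
	}
	for unitID := 0; unitID < numUnits; unitID++ {
		if isUnitUploaded(bitmap, unitID) {
			continue // the server already holds this unit
		}
		start := int64(unitID) * unitSize
		size := unitSize
		if start+size > stat.Size() {
			size = stat.Size() - start
		}
		buf := make([]byte, size)
		if _, err := f.ReadAt(buf, start); err != nil && err != io.EOF {
			return err
		}
		if err := sendUnit(unitID, buf); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	f, err := os.CreateTemp("", "unit-demo-*")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer os.Remove(f.Name())
	_, _ = f.WriteString("0123456789abcdef0123") // 20 bytes -> 3 units of 8 bytes
	err = uploadUnits(f, 8, 3, []int{0b001}, func(id int, data []byte) error {
		fmt.Printf("uploading unit %d (%d bytes)\n", id, len(data))
		return nil
	})
	if err != nil {
		fmt.Println(err)
	}
}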
16 changed files with 2083 additions and 9 deletions

View File

@@ -57,6 +57,7 @@ English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Contributing
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [MediaFire](https://www.mediafire.com)
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family, Group)
- [x] [YandexDisk](https://disk.yandex.com/)

View File

@@ -57,6 +57,7 @@
- [x] [又拍云对象存储](https://www.upyun.com/products/file-storage)
- [x] WebDav(支持无API的OneDrive/SharePoint)
- [x] Teambition[中国](https://www.teambition.com/ )[国际](https://us.teambition.com/ )
- [x] [MediaFire](https://www.mediafire.com)
- [x] [分秒帧](https://www.mediatrack.cn/)
- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云,共享群组)
- [x] [Yandex.Disk](https://disk.yandex.com/)

View File

@@ -57,6 +57,7 @@
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [MediaFire](https://www.mediafire.com)
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family, Group)
- [x] [YandexDisk](https://disk.yandex.com/)

View File

@@ -32,6 +32,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/ftp"
_ "github.com/alist-org/alist/v3/drivers/github"
_ "github.com/alist-org/alist/v3/drivers/github_releases"
_ "github.com/alist-org/alist/v3/drivers/gofile"
_ "github.com/alist-org/alist/v3/drivers/google_drive"
_ "github.com/alist-org/alist/v3/drivers/google_photo"
_ "github.com/alist-org/alist/v3/drivers/halalcloud"
@@ -41,6 +42,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/lanzou"
_ "github.com/alist-org/alist/v3/drivers/lenovonas_share"
_ "github.com/alist-org/alist/v3/drivers/local"
_ "github.com/alist-org/alist/v3/drivers/mediafire"
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
_ "github.com/alist-org/alist/v3/drivers/mega"
_ "github.com/alist-org/alist/v3/drivers/misskey"

View File: drivers/gofile/driver.go (new file, 261 lines)

@@ -0,0 +1,261 @@
package gofile
import (
"context"
"fmt"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
)
type Gofile struct {
model.Storage
Addition
accountId string
}
func (d *Gofile) Config() driver.Config {
return config
}
func (d *Gofile) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Gofile) Init(ctx context.Context) error {
if d.APIToken == "" {
return fmt.Errorf("API token is required")
}
// Get account ID
accountId, err := d.getAccountId(ctx)
if err != nil {
return fmt.Errorf("failed to get account ID: %w", err)
}
d.accountId = accountId
// Get account info to set root folder if not specified
if d.RootFolderID == "" {
accountInfo, err := d.getAccountInfo(ctx, accountId)
if err != nil {
return fmt.Errorf("failed to get account info: %w", err)
}
d.RootFolderID = accountInfo.Data.RootFolder
}
// Save driver storage
op.MustSaveDriverStorage(d)
return nil
}
func (d *Gofile) Drop(ctx context.Context) error {
return nil
}
func (d *Gofile) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
var folderId string
if dir.GetID() == "" {
folderId = d.GetRootId()
} else {
folderId = dir.GetID()
}
endpoint := fmt.Sprintf("/contents/%s", folderId)
var response ContentsResponse
err := d.getJSON(ctx, endpoint, &response)
if err != nil {
return nil, err
}
var objects []model.Obj
// Process children or contents
contents := response.Data.Children
if contents == nil {
contents = response.Data.Contents
}
for _, content := range contents {
objects = append(objects, d.convertContentToObj(content))
}
return objects, nil
}
func (d *Gofile) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if file.IsDir() {
return nil, errs.NotFile
}
// Create a direct link for the file
directLink, err := d.createDirectLink(ctx, file.GetID())
if err != nil {
return nil, fmt.Errorf("failed to create direct link: %w", err)
}
return &model.Link{
URL: directLink,
}, nil
}
func (d *Gofile) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
var parentId string
if parentDir.GetID() == "" {
parentId = d.GetRootId()
} else {
parentId = parentDir.GetID()
}
data := map[string]interface{}{
"parentFolderId": parentId,
"folderName": dirName,
}
var response CreateFolderResponse
err := d.postJSON(ctx, "/contents/createFolder", data, &response)
if err != nil {
return nil, err
}
return &model.Object{
ID: response.Data.ID,
Name: response.Data.Name,
IsFolder: true,
}, nil
}
func (d *Gofile) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var dstId string
if dstDir.GetID() == "" {
dstId = d.GetRootId()
} else {
dstId = dstDir.GetID()
}
data := map[string]interface{}{
"contentsId": srcObj.GetID(),
"folderId": dstId,
}
err := d.putJSON(ctx, "/contents/move", data, nil)
if err != nil {
return nil, err
}
// Return updated object
return &model.Object{
ID: srcObj.GetID(),
Name: srcObj.GetName(),
Size: srcObj.GetSize(),
Modified: srcObj.ModTime(),
IsFolder: srcObj.IsDir(),
}, nil
}
func (d *Gofile) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
data := map[string]interface{}{
"attribute": "name",
"attributeValue": newName,
}
var response UpdateResponse
err := d.putJSON(ctx, fmt.Sprintf("/contents/%s/update", srcObj.GetID()), data, &response)
if err != nil {
return nil, err
}
return &model.Object{
ID: srcObj.GetID(),
Name: newName,
Size: srcObj.GetSize(),
Modified: srcObj.ModTime(),
IsFolder: srcObj.IsDir(),
}, nil
}
func (d *Gofile) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var dstId string
if dstDir.GetID() == "" {
dstId = d.GetRootId()
} else {
dstId = dstDir.GetID()
}
data := map[string]interface{}{
"contentsId": srcObj.GetID(),
"folderId": dstId,
}
var response CopyResponse
err := d.postJSON(ctx, "/contents/copy", data, &response)
if err != nil {
return nil, err
}
// Get the new ID from the response
newId := srcObj.GetID()
if response.Data.CopiedContents != nil {
if id, ok := response.Data.CopiedContents[srcObj.GetID()]; ok {
newId = id
}
}
return &model.Object{
ID: newId,
Name: srcObj.GetName(),
Size: srcObj.GetSize(),
Modified: srcObj.ModTime(),
IsFolder: srcObj.IsDir(),
}, nil
}
func (d *Gofile) Remove(ctx context.Context, obj model.Obj) error {
data := map[string]interface{}{
"contentsId": obj.GetID(),
}
return d.deleteJSON(ctx, "/contents", data)
}
func (d *Gofile) Put(ctx context.Context, dstDir model.Obj, fileStreamer model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
var folderId string
if dstDir.GetID() == "" {
folderId = d.GetRootId()
} else {
folderId = dstDir.GetID()
}
response, err := d.uploadFile(ctx, folderId, fileStreamer, up)
if err != nil {
return nil, err
}
return &model.Object{
ID: response.Data.FileId,
Name: response.Data.FileName,
Size: fileStreamer.GetSize(),
IsFolder: false,
}, nil
}
func (d *Gofile) GetArchiveMeta(ctx context.Context, obj model.Obj, args model.ArchiveArgs) (model.ArchiveMeta, error) {
return nil, errs.NotImplement
}
func (d *Gofile) ListArchive(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) ([]model.Obj, error) {
return nil, errs.NotImplement
}
func (d *Gofile) Extract(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) (*model.Link, error) {
return nil, errs.NotImplement
}
func (d *Gofile) ArchiveDecompress(ctx context.Context, srcObj, dstDir model.Obj, args model.ArchiveDecompressArgs) ([]model.Obj, error) {
return nil, errs.NotImplement
}
var _ driver.Driver = (*Gofile)(nil)

View File: drivers/gofile/meta.go (new file, 26 lines)

@@ -0,0 +1,26 @@
package gofile
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
APIToken string `json:"api_token" required:"true" help:"Get your API token from your Gofile profile page"`
}
var config = driver.Config{
Name: "Gofile",
DefaultRoot: "",
LocalSort: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Gofile{}
})
}
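The init() above registers the driver with internal/op, which is why drivers/all.go only needs a blank import of the package. A minimal standalone sketch of that registration pattern, with the registry below as a simplified stand-in for internal/op:

package main

import "fmt"

// Driver is a trimmed-down stand-in for the project's driver.Driver interface.
type Driver interface{ Name() string }

// registry mimics internal/op's driver registry in miniature.
var registry = map[string]func() Driver{}

// RegisterDriver stores a constructor keyed by the driver's name.
func RegisterDriver(newFn func() Driver) {
	registry[newFn().Name()] = newFn
}

// Gofile is a placeholder driver type for the sketch.
type Gofile struct{}

func (Gofile) Name() string { return "Gofile" }

// A driver package does this in its own init(), so a blank import in
// drivers/all.go is enough to make the driver available.
func init() {
	RegisterDriver(func() Driver { return Gofile{} })
}

func main() {
	fmt.Println("registered drivers:", len(registry))
}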

View File: drivers/gofile/types.go (new file, 124 lines)

@@ -0,0 +1,124 @@
package gofile
import "time"
type APIResponse struct {
Status string `json:"status"`
Data interface{} `json:"data"`
}
type AccountResponse struct {
Status string `json:"status"`
Data struct {
ID string `json:"id"`
} `json:"data"`
}
type AccountInfoResponse struct {
Status string `json:"status"`
Data struct {
ID string `json:"id"`
Type string `json:"type"`
Email string `json:"email"`
RootFolder string `json:"rootFolder"`
} `json:"data"`
}
type Content struct {
ID string `json:"id"`
Type string `json:"type"` // "file" or "folder"
Name string `json:"name"`
Size int64 `json:"size,omitempty"`
CreateTime int64 `json:"createTime"`
ModTime int64 `json:"modTime,omitempty"`
DirectLink string `json:"directLink,omitempty"`
Children map[string]Content `json:"children,omitempty"`
ParentFolder string `json:"parentFolder,omitempty"`
MD5 string `json:"md5,omitempty"`
MimeType string `json:"mimeType,omitempty"`
Link string `json:"link,omitempty"`
}
type ContentsResponse struct {
Status string `json:"status"`
Data struct {
IsOwner bool `json:"isOwner"`
ID string `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
ParentFolder string `json:"parentFolder"`
CreateTime int64 `json:"createTime"`
ChildrenList []string `json:"childrenList,omitempty"`
Children map[string]Content `json:"children,omitempty"`
Contents map[string]Content `json:"contents,omitempty"`
Public bool `json:"public,omitempty"`
Description string `json:"description,omitempty"`
Tags string `json:"tags,omitempty"`
Expiry int64 `json:"expiry,omitempty"`
} `json:"data"`
}
type UploadResponse struct {
Status string `json:"status"`
Data struct {
DownloadPage string `json:"downloadPage"`
Code string `json:"code"`
ParentFolder string `json:"parentFolder"`
FileId string `json:"fileId"`
FileName string `json:"fileName"`
GuestToken string `json:"guestToken,omitempty"`
} `json:"data"`
}
type DirectLinkResponse struct {
Status string `json:"status"`
Data struct {
DirectLink string `json:"directLink"`
ID string `json:"id"`
} `json:"data"`
}
type CreateFolderResponse struct {
Status string `json:"status"`
Data struct {
ID string `json:"id"`
Type string `json:"type"`
Name string `json:"name"`
ParentFolder string `json:"parentFolder"`
CreateTime int64 `json:"createTime"`
} `json:"data"`
}
type CopyResponse struct {
Status string `json:"status"`
Data struct {
CopiedContents map[string]string `json:"copiedContents"` // oldId -> newId mapping
} `json:"data"`
}
type UpdateResponse struct {
Status string `json:"status"`
Data struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"data"`
}
type ErrorResponse struct {
Status string `json:"status"`
Error struct {
Message string `json:"message"`
Code string `json:"code"`
} `json:"error"`
}
func (c *Content) ModifiedTime() time.Time {
if c.ModTime > 0 {
return time.Unix(c.ModTime, 0)
}
return time.Unix(c.CreateTime, 0)
}
func (c *Content) IsDir() bool {
return c.Type == "folder"
}

View File: drivers/gofile/util.go (new file, 257 lines)

@@ -0,0 +1,257 @@
package gofile
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"mime/multipart"
"net/http"
"path/filepath"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
)
const (
baseAPI = "https://api.gofile.io"
uploadAPI = "https://upload.gofile.io"
)
func (d *Gofile) request(ctx context.Context, method, endpoint string, body io.Reader, headers map[string]string) (*http.Response, error) {
var url string
if strings.HasPrefix(endpoint, "http") {
url = endpoint
} else {
url = baseAPI + endpoint
}
req, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil {
return nil, err
}
req.Header.Set("Authorization", "Bearer "+d.APIToken)
req.Header.Set("User-Agent", "AList/3.0")
for k, v := range headers {
req.Header.Set(k, v)
}
return base.HttpClient.Do(req)
}
func (d *Gofile) getJSON(ctx context.Context, endpoint string, result interface{}) error {
resp, err := d.request(ctx, "GET", endpoint, nil, nil)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return d.handleError(resp)
}
return json.NewDecoder(resp.Body).Decode(result)
}
func (d *Gofile) postJSON(ctx context.Context, endpoint string, data interface{}, result interface{}) error {
jsonData, err := json.Marshal(data)
if err != nil {
return err
}
headers := map[string]string{
"Content-Type": "application/json",
}
resp, err := d.request(ctx, "POST", endpoint, bytes.NewBuffer(jsonData), headers)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return d.handleError(resp)
}
if result != nil {
return json.NewDecoder(resp.Body).Decode(result)
}
return nil
}
func (d *Gofile) putJSON(ctx context.Context, endpoint string, data interface{}, result interface{}) error {
jsonData, err := json.Marshal(data)
if err != nil {
return err
}
headers := map[string]string{
"Content-Type": "application/json",
}
resp, err := d.request(ctx, "PUT", endpoint, bytes.NewBuffer(jsonData), headers)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return d.handleError(resp)
}
if result != nil {
return json.NewDecoder(resp.Body).Decode(result)
}
return nil
}
func (d *Gofile) deleteJSON(ctx context.Context, endpoint string, data interface{}) error {
jsonData, err := json.Marshal(data)
if err != nil {
return err
}
headers := map[string]string{
"Content-Type": "application/json",
}
resp, err := d.request(ctx, "DELETE", endpoint, bytes.NewBuffer(jsonData), headers)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return d.handleError(resp)
}
return nil
}
func (d *Gofile) handleError(resp *http.Response) error {
body, _ := io.ReadAll(resp.Body)
var errorResp ErrorResponse
if err := json.Unmarshal(body, &errorResp); err == nil {
return fmt.Errorf("gofile API error: %s (code: %s)", errorResp.Error.Message, errorResp.Error.Code)
}
return fmt.Errorf("gofile API error: HTTP %d - %s", resp.StatusCode, string(body))
}
func (d *Gofile) uploadFile(ctx context.Context, folderId string, file model.FileStreamer, up driver.UpdateProgress) (*UploadResponse, error) {
var body bytes.Buffer
writer := multipart.NewWriter(&body)
if folderId != "" {
writer.WriteField("folderId", folderId)
}
part, err := writer.CreateFormFile("file", filepath.Base(file.GetName()))
if err != nil {
return nil, err
}
// Copy with progress tracking if available
if up != nil {
reader := &progressReader{
reader: file,
total: file.GetSize(),
up: up,
}
_, err = io.Copy(part, reader)
} else {
_, err = io.Copy(part, file)
}
if err != nil {
return nil, err
}
writer.Close()
headers := map[string]string{
"Content-Type": writer.FormDataContentType(),
}
resp, err := d.request(ctx, "POST", uploadAPI+"/uploadfile", &body, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, d.handleError(resp)
}
var result UploadResponse
err = json.NewDecoder(resp.Body).Decode(&result)
return &result, err
}
func (d *Gofile) createDirectLink(ctx context.Context, contentId string) (string, error) {
data := map[string]interface{}{}
var result DirectLinkResponse
err := d.postJSON(ctx, fmt.Sprintf("/contents/%s/directlinks", contentId), data, &result)
if err != nil {
return "", err
}
return result.Data.DirectLink, nil
}
func (d *Gofile) convertContentToObj(content Content) model.Obj {
return &model.ObjThumb{
Object: model.Object{
ID: content.ID,
Name: content.Name,
Size: content.Size,
Modified: content.ModifiedTime(),
IsFolder: content.IsDir(),
},
}
}
func (d *Gofile) getAccountId(ctx context.Context) (string, error) {
var result AccountResponse
err := d.getJSON(ctx, "/accounts/getid", &result)
if err != nil {
return "", err
}
return result.Data.ID, nil
}
func (d *Gofile) getAccountInfo(ctx context.Context, accountId string) (*AccountInfoResponse, error) {
var result AccountInfoResponse
err := d.getJSON(ctx, fmt.Sprintf("/accounts/%s", accountId), &result)
if err != nil {
return nil, err
}
return &result, nil
}
// progressReader wraps an io.Reader to track upload progress
type progressReader struct {
reader io.Reader
total int64
read int64
up driver.UpdateProgress
}
func (pr *progressReader) Read(p []byte) (n int, err error) {
n, err = pr.reader.Read(p)
pr.read += int64(n)
if pr.up != nil && pr.total > 0 {
progress := float64(pr.read) * 100 / float64(pr.total)
pr.up(progress)
}
return n, err
}

View File: drivers/mediafire/driver.go (new file, 433 lines)

@@ -0,0 +1,433 @@
package mediafire
/*
Package mediafire
Author: Da3zKi7<da3zki7@duck.com>
Date: 2025-09-11
D@' 3z K!7 - The King Of Cracking
*/
import (
"context"
"fmt"
"math/rand"
"net/http"
"os"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/cron"
"github.com/alist-org/alist/v3/pkg/utils"
)
type Mediafire struct {
model.Storage
Addition
cron *cron.Cron
actionToken string
appBase string
apiBase string
hostBase string
maxRetries int
secChUa string
secChUaPlatform string
userAgent string
}
func (d *Mediafire) Config() driver.Config {
return config
}
func (d *Mediafire) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Mediafire) Init(ctx context.Context) error {
if d.SessionToken == "" {
return fmt.Errorf("Init :: [MediaFire] {critical} missing sessionToken")
}
if d.Cookie == "" {
return fmt.Errorf("Init :: [MediaFire] {critical} missing Cookie")
}
if _, err := d.getSessionToken(ctx); err != nil {
d.renewToken(ctx)
num := rand.Intn(4) + 6
d.cron = cron.NewCron(time.Minute * time.Duration(num))
d.cron.Do(func() {
d.renewToken(ctx)
})
}
return nil
}
func (d *Mediafire) Drop(ctx context.Context) error {
return nil
}
func (d *Mediafire) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(ctx, dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return d.fileToObj(src), nil
})
}
func (d *Mediafire) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
downloadUrl, err := d.getDirectDownloadLink(ctx, file.GetID())
if err != nil {
return nil, err
}
res, err := base.NoRedirectClient.R().SetDoNotParseResponse(true).SetContext(ctx).Get(downloadUrl)
if err != nil {
return nil, err
}
defer func() {
_ = res.RawBody().Close()
}()
if res.StatusCode() == 302 {
downloadUrl = res.Header().Get("location")
}
return &model.Link{
URL: downloadUrl,
Header: http.Header{
"Origin": []string{d.appBase},
"Referer": []string{d.appBase + "/"},
"sec-ch-ua": []string{d.secChUa},
"sec-ch-ua-platform": []string{d.secChUaPlatform},
"User-Agent": []string{d.userAgent},
//"User-Agent": []string{base.UserAgent},
},
}, nil
}
func (d *Mediafire) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
data := map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"parent_key": parentDir.GetID(),
"foldername": dirName,
}
var resp MediafireFolderCreateResponse
_, err := d.postForm("/folder/create.php", data, &resp)
if err != nil {
return nil, err
}
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire API error: %s", resp.Response.Result)
}
created, _ := time.Parse("2006-01-02T15:04:05Z", resp.Response.CreatedUTC)
return &model.ObjThumb{
Object: model.Object{
ID: resp.Response.FolderKey,
Name: resp.Response.Name,
Size: 0,
Modified: created,
Ctime: created,
IsFolder: true,
},
Thumbnail: model.Thumbnail{},
}, nil
}
func (d *Mediafire) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var data map[string]string
var endpoint string
if srcObj.IsDir() {
endpoint = "/folder/move.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"folder_key_src": srcObj.GetID(),
"folder_key_dst": dstDir.GetID(),
}
} else {
endpoint = "/file/move.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"quick_key": srcObj.GetID(),
"folder_key": dstDir.GetID(),
}
}
var resp MediafireMoveResponse
_, err := d.postForm(endpoint, data, &resp)
if err != nil {
return nil, err
}
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire API error: %s", resp.Response.Result)
}
return srcObj, nil
}
func (d *Mediafire) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
var data map[string]string
var endpoint string
if srcObj.IsDir() {
endpoint = "/folder/update.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"folder_key": srcObj.GetID(),
"foldername": newName,
}
} else {
endpoint = "/file/update.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"quick_key": srcObj.GetID(),
"filename": newName,
}
}
var resp MediafireRenameResponse
_, err := d.postForm(endpoint, data, &resp)
if err != nil {
return nil, err
}
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire API error: %s", resp.Response.Result)
}
return &model.ObjThumb{
Object: model.Object{
ID: srcObj.GetID(),
Name: newName,
Size: srcObj.GetSize(),
Modified: srcObj.ModTime(),
Ctime: srcObj.CreateTime(),
IsFolder: srcObj.IsDir(),
},
Thumbnail: model.Thumbnail{},
}, nil
}
func (d *Mediafire) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var data map[string]string
var endpoint string
if srcObj.IsDir() {
endpoint = "/folder/copy.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"folder_key_src": srcObj.GetID(),
"folder_key_dst": dstDir.GetID(),
}
} else {
endpoint = "/file/copy.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"quick_key": srcObj.GetID(),
"folder_key": dstDir.GetID(),
}
}
var resp MediafireCopyResponse
_, err := d.postForm(endpoint, data, &resp)
if err != nil {
return nil, err
}
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire API error: %s", resp.Response.Result)
}
var newID string
if srcObj.IsDir() {
if len(resp.Response.NewFolderKeys) > 0 {
newID = resp.Response.NewFolderKeys[0]
}
} else {
if len(resp.Response.NewQuickKeys) > 0 {
newID = resp.Response.NewQuickKeys[0]
}
}
return &model.ObjThumb{
Object: model.Object{
ID: newID,
Name: srcObj.GetName(),
Size: srcObj.GetSize(),
Modified: srcObj.ModTime(),
Ctime: srcObj.CreateTime(),
IsFolder: srcObj.IsDir(),
},
Thumbnail: model.Thumbnail{},
}, nil
}
func (d *Mediafire) Remove(ctx context.Context, obj model.Obj) error {
var data map[string]string
var endpoint string
if obj.IsDir() {
endpoint = "/folder/delete.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"folder_key": obj.GetID(),
}
} else {
endpoint = "/file/delete.php"
data = map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"quick_key": obj.GetID(),
}
}
var resp MediafireRemoveResponse
_, err := d.postForm(endpoint, data, &resp)
if err != nil {
return err
}
if resp.Response.Result != "Success" {
return fmt.Errorf("MediaFire API error: %s", resp.Response.Result)
}
return nil
}
func (d *Mediafire) Put(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
_, err := d.PutResult(ctx, dstDir, file, up)
return err
}
func (d *Mediafire) PutResult(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer tempFile.Close()
osFile, ok := tempFile.(*os.File)
if !ok {
return nil, fmt.Errorf("expected *os.File, got %T", tempFile)
}
fileHash, err := d.calculateSHA256(osFile)
if err != nil {
return nil, err
}
checkResp, err := d.uploadCheck(ctx, file.GetName(), file.GetSize(), fileHash, dstDir.GetID())
if err != nil {
return nil, err
}
if checkResp.Response.ResumableUpload.AllUnitsReady == "yes" {
up(100.0)
}
if checkResp.Response.HashExists == "yes" && checkResp.Response.InAccount == "yes" {
up(100.0)
existingFile, err := d.getExistingFileInfo(ctx, fileHash, file.GetName(), dstDir.GetID())
if err == nil {
return existingFile, nil
}
}
var pollKey string
if checkResp.Response.ResumableUpload.AllUnitsReady != "yes" {
var err error
pollKey, err = d.uploadUnits(ctx, osFile, checkResp, file.GetName(), fileHash, dstDir.GetID(), up)
if err != nil {
return nil, err
}
} else {
pollKey = checkResp.Response.ResumableUpload.UploadKey
}
//fmt.Printf("pollKey: %+v\n", pollKey)
pollResp, err := d.pollUpload(ctx, pollKey)
if err != nil {
return nil, err
}
quickKey := pollResp.Response.Doupload.QuickKey
return &model.ObjThumb{
Object: model.Object{
ID: quickKey,
Name: file.GetName(),
Size: file.GetSize(),
},
Thumbnail: model.Thumbnail{},
}, nil
}
func (d *Mediafire) GetArchiveMeta(ctx context.Context, obj model.Obj, args model.ArchiveArgs) (model.ArchiveMeta, error) {
// TODO get archive file meta-info, return errs.NotImplement to use an internal archive tool, optional
return nil, errs.NotImplement
}
func (d *Mediafire) ListArchive(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) ([]model.Obj, error) {
// TODO list args.InnerPath in the archive obj, return errs.NotImplement to use an internal archive tool, optional
return nil, errs.NotImplement
}
func (d *Mediafire) Extract(ctx context.Context, obj model.Obj, args model.ArchiveInnerArgs) (*model.Link, error) {
// TODO return link of file args.InnerPath in the archive obj, return errs.NotImplement to use an internal archive tool, optional
return nil, errs.NotImplement
}
func (d *Mediafire) ArchiveDecompress(ctx context.Context, srcObj, dstDir model.Obj, args model.ArchiveDecompressArgs) ([]model.Obj, error) {
// TODO extract args.InnerPath path in the archive srcObj to the dstDir location, optional
// a folder with the same name as the archive file needs to be created to store the extracted results if args.PutIntoNewDir
// return errs.NotImplement to use an internal archive tool
return nil, errs.NotImplement
}
//func (d *Mediafire) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*Mediafire)(nil)

View File: drivers/mediafire/meta.go (new file, 54 lines)

@@ -0,0 +1,54 @@
package mediafire
/*
Package mediafire
Author: Da3zKi7<da3zki7@duck.com>
Date: 2025-09-11
D@' 3z K!7 - The King Of Cracking
*/
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootPath
//driver.RootID
SessionToken string `json:"session_token" required:"true" type:"string" help:"Required for MediaFire API"`
Cookie string `json:"cookie" required:"true" type:"string" help:"Required for navigation"`
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
ChunkSize int64 `json:"chunk_size" type:"number" default:"100"`
}
var config = driver.Config{
Name: "MediaFire",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "/",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Mediafire{
appBase: "https://app.mediafire.com",
apiBase: "https://www.mediafire.com/api/1.5",
hostBase: "https://www.mediafire.com",
maxRetries: 3,
secChUa: "\"Not)A;Brand\";v=\"8\", \"Chromium\";v=\"139\", \"Google Chrome\";v=\"139\"",
secChUaPlatform: "Windows",
userAgent: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/139.0.0.0 Safari/537.36",
}
})
}

View File: drivers/mediafire/types.go (new file, 232 lines)

@@ -0,0 +1,232 @@
package mediafire
/*
Package mediafire
Author: Da3zKi7<da3zki7@duck.com>
Date: 2025-09-11
D@' 3z K!7 - The King Of Cracking
*/
type MediafireRenewTokenResponse struct {
Response struct {
Action string `json:"action"`
SessionToken string `json:"session_token"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
} `json:"response"`
}
type MediafireResponse struct {
Response struct {
Action string `json:"action"`
FolderContent struct {
ChunkSize string `json:"chunk_size"`
ContentType string `json:"content_type"`
ChunkNumber string `json:"chunk_number"`
FolderKey string `json:"folderkey"`
Folders []MediafireFolder `json:"folders,omitempty"`
Files []MediafireFile `json:"files,omitempty"`
MoreChunks string `json:"more_chunks"`
} `json:"folder_content"`
Result string `json:"result"`
} `json:"response"`
}
type MediafireFolder struct {
FolderKey string `json:"folderkey"`
Name string `json:"name"`
Created string `json:"created"`
CreatedUTC string `json:"created_utc"`
}
type MediafireFile struct {
QuickKey string `json:"quickkey"`
Filename string `json:"filename"`
Size string `json:"size"`
Created string `json:"created"`
CreatedUTC string `json:"created_utc"`
MimeType string `json:"mimetype"`
}
type File struct {
ID string
Name string
Size int64
CreatedUTC string
IsFolder bool
}
type FolderContentResponse struct {
Folders []MediafireFolder
Files []MediafireFile
MoreChunks bool
}
type MediafireLinksResponse struct {
Response struct {
Action string `json:"action"`
Links []struct {
QuickKey string `json:"quickkey"`
View string `json:"view"`
NormalDownload string `json:"normal_download"`
OneTime struct {
Download string `json:"download"`
View string `json:"view"`
} `json:"one_time"`
} `json:"links"`
OneTimeKeyRequestCount string `json:"one_time_key_request_count"`
OneTimeKeyRequestMaxCount string `json:"one_time_key_request_max_count"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
} `json:"response"`
}
type MediafireDirectDownloadResponse struct {
Response struct {
Action string `json:"action"`
Links []struct {
QuickKey string `json:"quickkey"`
DirectDownload string `json:"direct_download"`
} `json:"links"`
DirectDownloadFreeBandwidth string `json:"direct_download_free_bandwidth"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
} `json:"response"`
}
type MediafireFolderCreateResponse struct {
Response struct {
Action string `json:"action"`
FolderKey string `json:"folder_key"`
UploadKey string `json:"upload_key"`
ParentFolderKey string `json:"parent_folderkey"`
Name string `json:"name"`
Description string `json:"description"`
Created string `json:"created"`
CreatedUTC string `json:"created_utc"`
Privacy string `json:"privacy"`
FileCount string `json:"file_count"`
FolderCount string `json:"folder_count"`
Revision string `json:"revision"`
DropboxEnabled string `json:"dropbox_enabled"`
Flag string `json:"flag"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
NewDeviceRevision int `json:"new_device_revision"`
} `json:"response"`
}
type MediafireMoveResponse struct {
Response struct {
Action string `json:"action"`
Asynchronous string `json:"asynchronous,omitempty"`
NewNames []string `json:"new_names"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
NewDeviceRevision int `json:"new_device_revision"`
} `json:"response"`
}
type MediafireRenameResponse struct {
Response struct {
Action string `json:"action"`
Asynchronous string `json:"asynchronous,omitempty"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
NewDeviceRevision int `json:"new_device_revision"`
} `json:"response"`
}
type MediafireCopyResponse struct {
Response struct {
Action string `json:"action"`
Asynchronous string `json:"asynchronous,omitempty"`
NewQuickKeys []string `json:"new_quickkeys,omitempty"`
NewFolderKeys []string `json:"new_folderkeys,omitempty"`
SkippedCount string `json:"skipped_count,omitempty"`
OtherCount string `json:"other_count,omitempty"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
NewDeviceRevision int `json:"new_device_revision"`
} `json:"response"`
}
type MediafireRemoveResponse struct {
Response struct {
Action string `json:"action"`
Asynchronous string `json:"asynchronous,omitempty"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
NewDeviceRevision int `json:"new_device_revision"`
} `json:"response"`
}
type MediafireCheckResponse struct {
Response struct {
Action string `json:"action"`
HashExists string `json:"hash_exists"`
InAccount string `json:"in_account"`
InFolder string `json:"in_folder"`
FileExists string `json:"file_exists"`
ResumableUpload struct {
AllUnitsReady string `json:"all_units_ready"`
NumberOfUnits string `json:"number_of_units"`
UnitSize string `json:"unit_size"`
Bitmap struct {
Count string `json:"count"`
Words []string `json:"words"`
} `json:"bitmap"`
UploadKey string `json:"upload_key"`
} `json:"resumable_upload"`
AvailableSpace string `json:"available_space"`
UsedStorageSize string `json:"used_storage_size"`
StorageLimit string `json:"storage_limit"`
StorageLimitExceeded string `json:"storage_limit_exceeded"`
UploadURL struct {
Simple string `json:"simple"`
SimpleFallback string `json:"simple_fallback"`
Resumable string `json:"resumable"`
ResumableFallback string `json:"resumable_fallback"`
} `json:"upload_url"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
} `json:"response"`
}
type MediafireActionTokenResponse struct {
Response struct {
Action string `json:"action"`
ActionToken string `json:"action_token"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
} `json:"response"`
}
type MediafirePollResponse struct {
Response struct {
Action string `json:"action"`
Doupload struct {
Result string `json:"result"`
Status string `json:"status"`
Description string `json:"description"`
QuickKey string `json:"quickkey"`
Hash string `json:"hash"`
Filename string `json:"filename"`
Size string `json:"size"`
Created string `json:"created"`
CreatedUTC string `json:"created_utc"`
Revision string `json:"revision"`
} `json:"doupload"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
} `json:"response"`
}
type MediafireFileSearchResponse struct {
Response struct {
Action string `json:"action"`
FileInfo []File `json:"file_info"`
Result string `json:"result"`
CurrentAPIVersion string `json:"current_api_version"`
} `json:"response"`
}

View File: drivers/mediafire/util.go (new file, 626 lines)

@@ -0,0 +1,626 @@
package mediafire
/*
Package mediafire
Author: Da3zKi7<da3zki7@duck.com>
Date: 2025-09-11
D@' 3z K!7 - The King Of Cracking
*/
import (
"bytes"
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
)
func (d *Mediafire) getSessionToken(ctx context.Context) (string, error) {
tokenURL := d.hostBase + "/application/get_session_token.php"
req, err := http.NewRequestWithContext(ctx, http.MethodPost, tokenURL, nil)
if err != nil {
return "", err
}
req.Header.Set("Accept", "*/*")
req.Header.Set("Accept-Encoding", "gzip, deflate, br, zstd")
req.Header.Set("Accept-Language", "en-US,en;q=0.9")
req.Header.Set("Content-Length", "0")
req.Header.Set("Cookie", d.Cookie)
req.Header.Set("DNT", "1")
req.Header.Set("Origin", d.hostBase)
req.Header.Set("Priority", "u=1, i")
req.Header.Set("Referer", (d.hostBase + "/"))
req.Header.Set("Sec-Ch-Ua", d.secChUa)
req.Header.Set("Sec-Ch-Ua-Mobile", "?0")
req.Header.Set("Sec-Ch-Ua-Platform", d.secChUaPlatform)
req.Header.Set("Sec-Fetch-Dest", "empty")
req.Header.Set("Sec-Fetch-Mode", "cors")
req.Header.Set("Sec-Fetch-Site", "same-site")
req.Header.Set("User-Agent", d.userAgent)
//req.Header.Set("Connection", "keep-alive")
resp, err := base.HttpClient.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return "", err
}
//fmt.Printf("getSessionToken :: Raw response: %s\n", string(body))
//fmt.Printf("getSessionToken :: Parsed response: %+v\n", resp)
var tokenResp struct {
Response struct {
SessionToken string `json:"session_token"`
} `json:"response"`
}
if resp.StatusCode == 200 {
if err := json.Unmarshal(body, &tokenResp); err != nil {
return "", err
}
if tokenResp.Response.SessionToken == "" {
return "", fmt.Errorf("empty session token received")
}
cookieMap := make(map[string]string)
for _, cookie := range resp.Cookies() {
cookieMap[cookie.Name] = cookie.Value
}
if len(cookieMap) > 0 {
var cookies []string
for name, value := range cookieMap {
cookies = append(cookies, fmt.Sprintf("%s=%s", name, value))
}
d.Cookie = strings.Join(cookies, "; ")
op.MustSaveDriverStorage(d)
//fmt.Printf("getSessionToken :: Captured cookies: %s\n", d.Cookie)
}
} else {
return "", fmt.Errorf("getSessionToken :: failed to get session token, status code: %d", resp.StatusCode)
}
d.SessionToken = tokenResp.Response.SessionToken
//fmt.Printf("Init :: Obtain Session Token %v", d.SessionToken)
op.MustSaveDriverStorage(d)
return d.SessionToken, nil
}
func (d *Mediafire) renewToken(_ context.Context) error {
query := map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
}
var resp MediafireRenewTokenResponse
_, err := d.postForm("/user/renew_session_token.php", query, &resp)
if err != nil {
return fmt.Errorf("failed to renew token: %w", err)
}
//fmt.Printf("getInfo :: Raw response: %s\n", string(body))
//fmt.Printf("getInfo :: Parsed response: %+v\n", resp)
if resp.Response.Result != "Success" {
return fmt.Errorf("MediaFire token renewal failed: %s", resp.Response.Result)
}
d.SessionToken = resp.Response.SessionToken
//fmt.Printf("Init :: Renew Session Token: %s", resp.Response.Result)
op.MustSaveDriverStorage(d)
return nil
}
func (d *Mediafire) getFiles(ctx context.Context, folderKey string) ([]File, error) {
files := make([]File, 0)
hasMore := true
chunkNumber := 1
for hasMore {
resp, err := d.getFolderContent(ctx, folderKey, chunkNumber)
if err != nil {
return nil, err
}
for _, folder := range resp.Folders {
files = append(files, File{
ID: folder.FolderKey,
Name: folder.Name,
Size: 0,
CreatedUTC: folder.CreatedUTC,
IsFolder: true,
})
}
for _, file := range resp.Files {
size, _ := strconv.ParseInt(file.Size, 10, 64)
files = append(files, File{
ID: file.QuickKey,
Name: file.Filename,
Size: size,
CreatedUTC: file.CreatedUTC,
IsFolder: false,
})
}
hasMore = resp.MoreChunks
chunkNumber++
}
return files, nil
}
func (d *Mediafire) getFolderContent(ctx context.Context, folderKey string, chunkNumber int) (*FolderContentResponse, error) {
foldersResp, err := d.getFolderContentByType(ctx, folderKey, "folders", chunkNumber)
if err != nil {
return nil, err
}
filesResp, err := d.getFolderContentByType(ctx, folderKey, "files", chunkNumber)
if err != nil {
return nil, err
}
return &FolderContentResponse{
Folders: foldersResp.Response.FolderContent.Folders,
Files: filesResp.Response.FolderContent.Files,
MoreChunks: foldersResp.Response.FolderContent.MoreChunks == "yes" || filesResp.Response.FolderContent.MoreChunks == "yes",
}, nil
}
func (d *Mediafire) getFolderContentByType(_ context.Context, folderKey, contentType string, chunkNumber int) (*MediafireResponse, error) {
data := map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"folder_key": folderKey,
"content_type": contentType,
"chunk": strconv.Itoa(chunkNumber),
"chunk_size": strconv.FormatInt(d.ChunkSize, 10),
"details": "yes",
"order_direction": d.OrderDirection,
"order_by": d.OrderBy,
"filter": "",
}
var resp MediafireResponse
_, err := d.postForm("/folder/get_content.php", data, &resp)
if err != nil {
return nil, err
}
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire API error: %s", resp.Response.Result)
}
return &resp, nil
}
func (d *Mediafire) fileToObj(f File) *model.ObjThumb {
created, _ := time.Parse("2006-01-02T15:04:05Z", f.CreatedUTC)
var thumbnailURL string
if !f.IsFolder && f.ID != "" {
thumbnailURL = d.hostBase + "/convkey/acaa/" + f.ID + "3g.jpg"
}
return &model.ObjThumb{
Object: model.Object{
ID: f.ID,
//Path: "",
Name: f.Name,
Size: f.Size,
Modified: created,
Ctime: created,
IsFolder: f.IsFolder,
},
Thumbnail: model.Thumbnail{
Thumbnail: thumbnailURL,
},
}
}
func (d *Mediafire) getForm(endpoint string, query map[string]string, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetQueryParams(query)
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
//"User-Agent": base.UserAgent,
"User-Agent": d.userAgent,
"Origin": d.appBase,
"Referer": d.appBase + "/",
})
// Decode the response into resp when a result target is provided
if resp != nil {
req.SetResult(resp)
}
// Targets MediaFire API
res, err := req.Get(d.apiBase + endpoint)
if err != nil {
return nil, err
}
return res.Body(), nil
}
func (d *Mediafire) postForm(endpoint string, data map[string]string, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetFormData(data)
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
"Content-Type": "application/x-www-form-urlencoded",
//"User-Agent": base.UserAgent,
"User-Agent": d.userAgent,
"Origin": d.appBase,
"Referer": d.appBase + "/",
})
// Decode the response into resp when a result target is provided
if resp != nil {
req.SetResult(resp)
}
// Targets MediaFire API
res, err := req.Post(d.apiBase + endpoint)
if err != nil {
return nil, err
}
return res.Body(), nil
}
func (d *Mediafire) getDirectDownloadLink(_ context.Context, fileID string) (string, error) {
data := map[string]string{
"session_token": d.SessionToken,
"quick_key": fileID,
"link_type": "direct_download",
"response_format": "json",
}
var resp MediafireDirectDownloadResponse
_, err := d.getForm("/file/get_links.php", data, &resp)
if err != nil {
return "", err
}
if resp.Response.Result != "Success" {
return "", fmt.Errorf("MediaFire API error: %s", resp.Response.Result)
}
if len(resp.Response.Links) == 0 {
return "", fmt.Errorf("no download links found")
}
return resp.Response.Links[0].DirectDownload, nil
}
func (d *Mediafire) calculateSHA256(file *os.File) (string, error) {
hasher := sha256.New()
if _, err := file.Seek(0, 0); err != nil {
return "", err
}
if _, err := io.Copy(hasher, file); err != nil {
return "", err
}
return hex.EncodeToString(hasher.Sum(nil)), nil
}
func (d *Mediafire) uploadCheck(ctx context.Context, filename string, filesize int64, filehash, folderKey string) (*MediafireCheckResponse, error) {
actionToken, err := d.getActionToken(ctx)
if err != nil {
return nil, fmt.Errorf("failed to get action token: %w", err)
}
query := map[string]string{
"session_token": actionToken, /* d.SessionToken */
"filename": filename,
"size": strconv.FormatInt(filesize, 10),
"hash": filehash,
"folder_key": folderKey,
"resumable": "yes",
"response_format": "json",
}
var resp MediafireCheckResponse
_, err = d.postForm("/upload/check.php", query, &resp)
if err != nil {
return nil, err
}
//fmt.Printf("uploadCheck :: Raw response: %s\n", string(body))
//fmt.Printf("uploadCheck :: Parsed response: %+v\n", resp)
//fmt.Printf("uploadCheck :: ResumableUpload section: %+v\n", resp.Response.ResumableUpload)
//fmt.Printf("uploadCheck :: Upload key specifically: '%s'\n", resp.Response.ResumableUpload.UploadKey)
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire upload check failed: %s", resp.Response.Result)
}
return &resp, nil
}
func (d *Mediafire) resumableUpload(ctx context.Context, folderKey, uploadKey string, unitData []byte, unitID int, fileHash, filename string, totalFileSize int64) (string, error) {
actionToken, err := d.getActionToken(ctx)
if err != nil {
return "", err
}
url := d.apiBase + "/upload/resumable.php"
req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(unitData))
if err != nil {
return "", err
}
q := req.URL.Query()
q.Add("folder_key", folderKey)
q.Add("response_format", "json")
q.Add("session_token", actionToken)
q.Add("key", uploadKey)
req.URL.RawQuery = q.Encode()
req.Header.Set("x-filehash", fileHash)
req.Header.Set("x-filesize", strconv.FormatInt(totalFileSize, 10))
req.Header.Set("x-unit-id", strconv.Itoa(unitID))
req.Header.Set("x-unit-size", strconv.FormatInt(int64(len(unitData)), 10))
req.Header.Set("x-unit-hash", d.sha256Hex(bytes.NewReader(unitData)))
req.Header.Set("x-filename", filename)
req.Header.Set("Content-Type", "application/octet-stream")
req.ContentLength = int64(len(unitData))
/* fmt.Printf("Debug resumable upload request:\n")
fmt.Printf(" URL: %s\n", req.URL.String())
fmt.Printf(" Headers: %+v\n", req.Header)
fmt.Printf(" Unit ID: %d\n", unitID)
fmt.Printf(" Unit Size: %d\n", len(unitData))
fmt.Printf(" Upload Key: %s\n", uploadKey)
fmt.Printf(" Action Token: %s\n", actionToken) */
res, err := base.HttpClient.Do(req)
if err != nil {
return "", err
}
defer res.Body.Close()
body, err := io.ReadAll(res.Body)
if err != nil {
return "", fmt.Errorf("failed to read response body: %v", err)
}
//fmt.Printf("MediaFire resumable upload response (status %d): %s\n", res.StatusCode, string(body))
var uploadResp struct {
Response struct {
Doupload struct {
Key string `json:"key"`
} `json:"doupload"`
Result string `json:"result"`
} `json:"response"`
}
if err := json.Unmarshal(body, &uploadResp); err != nil {
return "", fmt.Errorf("failed to parse response: %v", err)
}
if res.StatusCode != 200 {
return "", fmt.Errorf("resumable upload failed with status %d", res.StatusCode)
}
return uploadResp.Response.Doupload.Key, nil
}
func (d *Mediafire) uploadUnits(ctx context.Context, file *os.File, checkResp *MediafireCheckResponse, filename, fileHash, folderKey string, up driver.UpdateProgress) (string, error) {
unitSize, _ := strconv.ParseInt(checkResp.Response.ResumableUpload.UnitSize, 10, 64)
numUnits, _ := strconv.Atoi(checkResp.Response.ResumableUpload.NumberOfUnits)
uploadKey := checkResp.Response.ResumableUpload.UploadKey
stringWords := checkResp.Response.ResumableUpload.Bitmap.Words
intWords := make([]int, len(stringWords))
for i, word := range stringWords {
intWords[i], _ = strconv.Atoi(word)
}
var finalUploadKey string
for unitID := 0; unitID < numUnits; unitID++ {
if utils.IsCanceled(ctx) {
return "", ctx.Err()
}
if d.isUnitUploaded(intWords, unitID) {
up(float64(unitID+1) * 100 / float64(numUnits))
continue
}
uploadKey, err := d.uploadSingleUnit(ctx, file, unitID, unitSize, fileHash, filename, uploadKey, folderKey)
if err != nil {
return "", err
}
finalUploadKey = uploadKey
up(float64(unitID+1) * 100 / float64(numUnits))
}
return finalUploadKey, nil
}
func (d *Mediafire) uploadSingleUnit(ctx context.Context, file *os.File, unitID int, unitSize int64, fileHash, filename, uploadKey, folderKey string) (string, error) {
start := int64(unitID) * unitSize
size := unitSize
stat, err := file.Stat()
if err != nil {
return "", err
}
fileSize := stat.Size()
if start+size > fileSize {
size = fileSize - start
}
unitData := make([]byte, size)
if _, err := file.ReadAt(unitData, start); err != nil {
return "", err
}
return d.resumableUpload(ctx, folderKey, uploadKey, unitData, unitID, fileHash, filename, fileSize)
}
func (d *Mediafire) getActionToken(_ context.Context) (string, error) {
if d.actionToken != "" {
return d.actionToken, nil
}
data := map[string]string{
"type": "upload",
"lifespan": "1440",
"response_format": "json",
"session_token": d.SessionToken,
}
var resp MediafireActionTokenResponse
_, err := d.postForm("/user/get_action_token.php", data, &resp)
if err != nil {
return "", err
}
if resp.Response.Result != "Success" {
return "", fmt.Errorf("MediaFire action token failed: %s", resp.Response.Result)
}
return resp.Response.ActionToken, nil
}
func (d *Mediafire) pollUpload(ctx context.Context, key string) (*MediafirePollResponse, error) {
actionToken, err := d.getActionToken(ctx)
if err != nil {
return nil, fmt.Errorf("failed to get action token: %w", err)
}
//fmt.Printf("Debug Key: %+v\n", key)
query := map[string]string{
"key": key,
"response_format": "json",
"session_token": actionToken, /* d.SessionToken */
}
var resp MediafirePollResponse
_, err = d.postForm("/upload/poll_upload.php", query, &resp)
if err != nil {
return nil, err
}
//fmt.Printf("pollUpload :: Raw response: %s\n", string(body))
//fmt.Printf("pollUpload :: Parsed response: %+v\n", resp)
//fmt.Printf("pollUpload :: Debug Result: %+v\n", resp.Response.Result)
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire poll upload failed: %s", resp.Response.Result)
}
return &resp, nil
}
func (d *Mediafire) sha256Hex(r io.Reader) string {
h := sha256.New()
io.Copy(h, r)
return hex.EncodeToString(h.Sum(nil))
}
func (d *Mediafire) isUnitUploaded(words []int, unitID int) bool {
wordIndex := unitID / 16
bitIndex := unitID % 16
if wordIndex >= len(words) {
return false
}
return (words[wordIndex]>>bitIndex)&1 == 1
}
func (d *Mediafire) getExistingFileInfo(ctx context.Context, fileHash, filename, folderKey string) (*model.ObjThumb, error) {
if fileInfo, err := d.getFileByHash(ctx, fileHash); err == nil && fileInfo != nil {
return fileInfo, nil
}
files, err := d.getFiles(ctx, folderKey)
if err != nil {
return nil, err
}
for _, file := range files {
if file.Name == filename && !file.IsFolder {
return d.fileToObj(file), nil
}
}
return nil, fmt.Errorf("existing file not found")
}
func (d *Mediafire) getFileByHash(_ context.Context, hash string) (*model.ObjThumb, error) {
query := map[string]string{
"session_token": d.SessionToken,
"response_format": "json",
"hash": hash,
}
var resp MediafireFileSearchResponse
_, err := d.postForm("/file/get_info.php", query, &resp)
if err != nil {
return nil, err
}
if resp.Response.Result != "Success" {
return nil, fmt.Errorf("MediaFire file search failed: %s", resp.Response.Result)
}
if len(resp.Response.FileInfo) == 0 {
return nil, fmt.Errorf("file not found by hash")
}
file := resp.Response.FileInfo[0]
return d.fileToObj(file), nil
}

View File

@@ -34,6 +34,14 @@ func GetRoles(pageIndex, pageSize int) (roles []model.Role, count int64, err err
return roles, count, nil
}
func GetAllRoles() ([]model.Role, error) {
var roles []model.Role
if err := db.Find(&roles).Error; err != nil {
return nil, errors.WithStack(err)
}
return roles, nil
}
func CreateRole(r *model.Role) error {
if err := db.Create(r).Error; err != nil {
return errors.WithStack(err)

View File

@@ -83,6 +83,14 @@ func GetUsers(pageIndex, pageSize int) (users []model.User, count int64, err err
return users, count, nil
}
func GetAllUsers() ([]model.User, error) {
var users []model.User
if err := db.Find(&users).Error; err != nil {
return nil, errors.WithStack(err)
}
return users, nil
}
func DeleteUserById(id uint) error {
return errors.WithStack(db.Delete(&model.User{}, id).Error)
}

View File

@@ -41,6 +41,18 @@ func GetStorageByMountPath(mountPath string) (driver.Driver, error) {
return storageDriver, nil
}
func firstPathSegment(p string) string {
p = utils.FixAndCleanPath(p)
p = strings.TrimPrefix(p, "/")
if p == "" {
return ""
}
if i := strings.Index(p, "/"); i >= 0 {
return p[:i]
}
return p
}
// CreateStorage Save the storage to database so storage can get an id
// then instantiate corresponding driver and save it in memory
func CreateStorage(ctx context.Context, storage model.Storage) (uint, error) {
@@ -267,6 +279,34 @@ func DeleteStorageById(ctx context.Context, id uint) error {
if err != nil {
return errors.WithMessage(err, "failed get storage")
}
firstMount := firstPathSegment(storage.MountPath)
if firstMount != "" {
roles, err := db.GetAllRoles()
if err != nil {
return errors.WithMessage(err, "failed to load roles")
}
users, err := db.GetAllUsers()
if err != nil {
return errors.WithMessage(err, "failed to load users")
}
var usedBy []string
for _, r := range roles {
for _, entry := range r.PermissionScopes {
if firstPathSegment(entry.Path) == firstMount {
usedBy = append(usedBy, "role:"+r.Name)
break
}
}
}
for _, u := range users {
if firstPathSegment(u.BasePath) == firstMount {
usedBy = append(usedBy, "user:"+u.Username)
}
}
if len(usedBy) > 0 {
return errors.Errorf("storage is used by %s, please cancel usage first", strings.Join(usedBy, ", "))
}
}
if !storage.Disabled {
storageDriver, err := GetStorageByMountPath(storage.MountPath)
if err != nil {

View File

@@ -165,25 +165,25 @@ func CurrentUser(c *gin.Context) {
var roleNames []string
permMap := map[string]int32{}
addedPaths := map[string]bool{}
paths := make([]string, 0)
for _, role := range user.RolesDetail {
roleNames = append(roleNames, role.Name)
for _, entry := range role.PermissionScopes {
cleanPath := path.Clean("/" + strings.TrimPrefix(entry.Path, "/"))
if _, ok := permMap[cleanPath]; !ok {
paths = append(paths, cleanPath)
}
permMap[cleanPath] |= entry.Permission
}
}
userResp.RoleNames = roleNames
for fullPath, perm := range permMap {
if !addedPaths[fullPath] {
userResp.Permissions = append(userResp.Permissions, model.PermissionEntry{
Path: fullPath,
Permission: perm,
})
addedPaths[fullPath] = true
}
for _, fullPath := range paths {
userResp.Permissions = append(userResp.Permissions, model.PermissionEntry{
Path: fullPath,
Permission: permMap[fullPath],
})
}
common.SuccessResp(c, userResp)