This commit is contained in:
www.xueximeng.com
2025-10-28 21:54:24 +08:00
parent 538abef99d
commit 7e265aa7b6

View File

@@ -23,20 +23,19 @@ import (
"time"
"unsafe"
"github.com/gin-gonic/gin"
"pansou/model"
"pansou/plugin"
"pansou/util/json"
"github.com/gin-gonic/gin"
cloudscraper "github.com/Advik-B/cloudscraper/lib"
)
// 插件配置参数
const (
MaxConcurrentUsers = 10 // 最多使用的用户数
MaxConcurrentDetails = 50 // 最大并发详情请求数
DebugLog = true // 调试日志开关排查问题时改为true
MaxConcurrentUsers = 10 // 最多使用的用户数
MaxConcurrentDetails = 50 // 最大并发详情请求数
DebugLog = true // 调试日志开关排查问题时改为true
)
// 默认账户配置可通过Web界面添加更多账户
@@ -58,9 +57,9 @@ func init() {
if cachePath == "" {
cachePath = "./cache"
}
StorageDir = filepath.Join(cachePath, "gying_users")
if err := os.MkdirAll(StorageDir, 0755); err != nil {
fmt.Printf("⚠️ 警告: 无法创建Gying存储目录 %s: %v\n", StorageDir, err)
} else {
@@ -420,8 +419,8 @@ type SearchData struct {
// DetailData 详情接口JSON数据结构
type DetailData struct {
Code int `json:"code"`
WP bool `json:"wp"`
Code int `json:"code"`
WP bool `json:"wp"`
Panlist struct {
ID []string `json:"id"`
Name []string `json:"name"`
@@ -468,7 +467,7 @@ func (p *GyingPlugin) RegisterWebRoutes(router *gin.RouterGroup) {
gying := router.Group("/gying")
gying.GET("/:param", p.handleManagePage)
gying.POST("/:param", p.handleManagePagePOST)
fmt.Printf("[Gying] Web路由已注册: /gying/:param\n")
}
@@ -492,7 +491,7 @@ func (p *GyingPlugin) SearchWithResult(keyword string, ext map[string]interface{
if DebugLog {
fmt.Printf("[Gying] 找到 %d 个有效用户\n", len(users))
}
if len(users) == 0 {
if DebugLog {
fmt.Printf("[Gying] 没有有效用户,返回空结果\n")
@@ -551,7 +550,7 @@ func (p *GyingPlugin) loadAllUsers() {
// scraper实例将在initDefaultAccounts中通过重新登录获取
p.users.Store(user.Hash, &user)
count++
if DebugLog {
hasPassword := "无"
if user.EncryptedPassword != "" {
@@ -568,7 +567,7 @@ func (p *GyingPlugin) loadAllUsers() {
// 包括1. DefaultAccounts代码配置 2. 从文件加载的用户(使用加密密码重新登录)
func (p *GyingPlugin) initDefaultAccounts() {
fmt.Printf("[Gying] ========== 异步初始化所有账户 ==========\n")
// 步骤1处理DefaultAccounts代码中配置的默认账户
for i, account := range DefaultAccounts {
if DebugLog {
@@ -577,7 +576,7 @@ func (p *GyingPlugin) initDefaultAccounts() {
p.initOrRestoreUser(account.Username, account.Password, "default")
}
// 步骤2遍历所有已加载的用户恢复没有scraper的用户
var usersToRestore []*User
p.users.Range(func(key, value interface{}) bool {
@@ -589,21 +588,21 @@ func (p *GyingPlugin) initDefaultAccounts() {
}
return true
})
if len(usersToRestore) > 0 {
fmt.Printf("[Gying] 发现 %d 个需要恢复的用户(使用加密密码重新登录)\n", len(usersToRestore))
for i, user := range usersToRestore {
if DebugLog {
fmt.Printf("[Gying] [恢复用户 %d/%d] 处理: %s\n", i+1, len(usersToRestore), user.UsernameMasked)
}
// 解密密码
password, err := p.decryptPassword(user.EncryptedPassword)
if err != nil {
fmt.Printf("[Gying] ❌ 用户 %s 解密密码失败: %v\n", user.UsernameMasked, err)
continue
}
p.initOrRestoreUser(user.Username, password, "restore")
}
}
@@ -614,7 +613,7 @@ func (p *GyingPlugin) initDefaultAccounts() {
// initOrRestoreUser 初始化或恢复单个用户(登录并保存)
func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
hash := p.generateHash(username)
// 检查scraper是否已存在
_, scraperExists := p.scrapers.Load(hash)
if scraperExists {
@@ -623,7 +622,7 @@ func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
}
return
}
// 登录
if DebugLog {
fmt.Printf("[Gying] 开始登录账户: %s\n", username)
@@ -644,7 +643,7 @@ func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
fmt.Printf("[Gying] ❌ 加密密码失败: %v\n", err)
return
}
// 保存用户
user := &User{
Hash: hash,
@@ -658,10 +657,10 @@ func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
ExpireAt: time.Now().AddDate(0, 4, 0), // 121天有效期
LastAccessAt: time.Now(),
}
// 保存scraper实例到内存
p.scrapers.Store(hash, scraper)
if err := p.saveUser(user); err != nil {
fmt.Printf("[Gying] ❌ 保存账户失败: %v\n", err)
return
@@ -705,7 +704,7 @@ func (p *GyingPlugin) deleteUser(hash string) error {
// getActiveUsers 获取有效用户
func (p *GyingPlugin) getActiveUsers() []*User {
var users []*User
p.users.Range(func(key, value interface{}) bool {
user := value.(*User)
if user.Status == "active" && user.Cookie != "" {
@@ -713,7 +712,7 @@ func (p *GyingPlugin) getActiveUsers() []*User {
}
return true
})
return users
}
@@ -794,13 +793,13 @@ func (p *GyingPlugin) handleGetStatus(c *gin.Context, hash string) {
}
respondSuccess(c, "获取成功", gin.H{
"hash": hash,
"logged_in": loggedIn,
"status": user.Status,
"username_masked": user.UsernameMasked,
"login_time": user.LoginAt.Format("2006-01-02 15:04:05"),
"expire_time": user.ExpireAt.Format("2006-01-02 15:04:05"),
"expires_in_days": expiresInDays,
"hash": hash,
"logged_in": loggedIn,
"status": user.Status,
"username_masked": user.UsernameMasked,
"login_time": user.LoginAt.Format("2006-01-02 15:04:05"),
"expire_time": user.ExpireAt.Format("2006-01-02 15:04:05"),
"expires_in_days": expiresInDays,
})
}
@@ -830,7 +829,7 @@ func (p *GyingPlugin) handleLogin(c *gin.Context, hash string, reqData map[strin
respondError(c, "加密密码失败: "+err.Error())
return
}
// 保存用户
user := &User{
Hash: hash,
@@ -843,7 +842,7 @@ func (p *GyingPlugin) handleLogin(c *gin.Context, hash string, reqData map[strin
ExpireAt: time.Now().AddDate(0, 4, 0), // 121天
LastAccessAt: time.Now(),
}
if _, exists := p.getUserByHash(hash); !exists {
user.CreatedAt = time.Now()
}
@@ -900,15 +899,15 @@ func (p *GyingPlugin) handleTestSearch(c *gin.Context, hash string, reqData map[
respondError(c, "用户scraper实例不存在请重新登录")
return
}
scraper, ok := scraperVal.(*cloudscraper.Scraper)
if !ok || scraper == nil {
respondError(c, "scraper实例无效请重新登录")
return
}
// 执行搜索
results, err := p.searchWithScraper(keyword, scraper)
// 执行搜索带403自动重新登录
results, err := p.searchWithScraperWithRetry(keyword, scraper, user)
if err != nil {
respondError(c, "搜索失败: "+err.Error())
return
@@ -951,27 +950,27 @@ func (p *GyingPlugin) handleTestSearch(c *gin.Context, hash string, reqData map[
// encryptPassword encrypts a plaintext password with AES-256-GCM and returns
// it as a base64 string. The random nonce is prepended to the ciphertext so
// decryptPassword can recover it.
func (p *GyingPlugin) encryptPassword(password string) (string, error) {
	// 32-byte key selects AES-256. NOTE(review): hard-coded secret — the
	// original comment already suggests moving it to config/env; changing
	// the bytes would invalidate previously stored passwords.
	key := []byte("gying-secret-key-32bytes-long!!!") // 32字节密钥用于AES-256

	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}

	// GCM gives authenticated encryption on top of the block cipher.
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}

	// Fresh random nonce per call; stored as the prefix of the sealed output.
	nonce := make([]byte, gcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return "", err
	}

	// Seal appends the ciphertext to the nonce, producing nonce||ciphertext.
	sealed := gcm.Seal(nonce, nonce, []byte(password), nil)
	return base64.StdEncoding.EncodeToString(sealed), nil
}
@@ -980,34 +979,34 @@ func (p *GyingPlugin) encryptPassword(password string) (string, error) {
// decryptPassword reverses encryptPassword: base64-decode, split off the
// GCM nonce prefix, then decrypt and authenticate the remainder with the
// same hard-coded AES-256 key.
func (p *GyingPlugin) decryptPassword(encrypted string) (string, error) {
	// Must be byte-identical to the key used by encryptPassword.
	key := []byte("gying-secret-key-32bytes-long!!!")

	raw, err := base64.StdEncoding.DecodeString(encrypted)
	if err != nil {
		return "", err
	}

	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}

	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}

	// Layout produced by encryptPassword: nonce || ciphertext. Reject
	// inputs too short to even contain the nonce.
	size := gcm.NonceSize()
	if len(raw) < size {
		return "", fmt.Errorf("ciphertext too short")
	}
	nonce, body := raw[:size], raw[size:]

	plain, err := gcm.Open(nil, nonce, body, nil)
	if err != nil {
		return "", err
	}
	return string(plain), nil
}
@@ -1020,64 +1019,64 @@ func (p *GyingPlugin) createScraperWithCookies(cookieStr string) (*cloudscraper.
// 创建cloudscraper实例配置以保护cookies不被刷新
scraper, err := cloudscraper.New(
cloudscraper.WithSessionConfig(
false, // refreshOn403 = false禁用403时自动刷新
365*24*time.Hour, // interval = 1年基本不刷新
0, // maxRetries = 0
false, // refreshOn403 = false禁用403时自动刷新
365*24*time.Hour, // interval = 1年基本不刷新
0, // maxRetries = 0
),
)
if err != nil {
return nil, fmt.Errorf("创建cloudscraper失败: %w", err)
}
// 如果有保存的cookies使用反射设置到scraper的内部http.Client
if cookieStr != "" {
cookies := parseCookieString(cookieStr)
if DebugLog {
fmt.Printf("[Gying] 正在恢复 %d 个cookie到scraper实例\n", len(cookies))
}
// 使用反射访问scraper的unexported client字段
scraperValue := reflect.ValueOf(scraper).Elem()
clientField := scraperValue.FieldByName("client")
if clientField.IsValid() && !clientField.IsNil() {
// 使用反射访问client (需要使用Elem()因为是指针)
clientValue := reflect.NewAt(clientField.Type(), unsafe.Pointer(clientField.UnsafeAddr())).Elem()
client, ok := clientValue.Interface().(*http.Client)
if ok && client != nil && client.Jar != nil {
// 将cookies设置到cookiejar
// 注意必须使用正确的URL和cookie属性
gyingURL, _ := url.Parse("https://www.gying.net")
var httpCookies []*http.Cookie
for name, value := range cookies {
cookie := &http.Cookie{
Name: name,
Value: value,
Name: name,
Value: value,
// 不设置Domain和Path让cookiejar根据URL自动推导
// cookiejar.SetCookies会根据提供的URL自动设置正确的Domain和Path
}
httpCookies = append(httpCookies, cookie)
if DebugLog {
fmt.Printf("[Gying] 准备恢复Cookie: %s=%s\n",
fmt.Printf("[Gying] 准备恢复Cookie: %s=%s\n",
cookie.Name, cookie.Value[:min(10, len(cookie.Value))])
}
}
client.Jar.SetCookies(gyingURL, httpCookies)
// 验证cookies是否被正确设置
if DebugLog {
storedCookies := client.Jar.Cookies(gyingURL)
fmt.Printf("[Gying] ✅ 成功恢复 %d 个cookie到scraper的cookiejar\n", len(cookies))
fmt.Printf("[Gying] 验证: cookiejar中现有 %d 个cookie\n", len(storedCookies))
// 详细打印每个cookie以便调试
// 详细打印每个cookie以便调试
for i, c := range storedCookies {
fmt.Printf("[Gying] 设置后Cookie[%d]: %s=%s (Domain:%s, Path:%s)\n",
fmt.Printf("[Gying] 设置后Cookie[%d]: %s=%s (Domain:%s, Path:%s)\n",
i, c.Name, c.Value[:min(10, len(c.Value))], c.Domain, c.Path)
}
}
@@ -1092,7 +1091,7 @@ func (p *GyingPlugin) createScraperWithCookies(cookieStr string) (*cloudscraper.
}
}
}
return scraper, nil
}
@@ -1100,7 +1099,7 @@ func (p *GyingPlugin) createScraperWithCookies(cookieStr string) (*cloudscraper.
func parseCookieString(cookieStr string) map[string]string {
cookies := make(map[string]string)
parts := strings.Split(cookieStr, ";")
for _, part := range parts {
part = strings.TrimSpace(part)
if idx := strings.Index(part, "="); idx > 0 {
@@ -1109,18 +1108,18 @@ func parseCookieString(cookieStr string) map[string]string {
cookies[name] = value
}
}
return cookies
}
// ============ 登录逻辑 ============
// doLogin 执行登录返回scraper实例和cookie字符串
//
//
// 登录流程3步
// 1. GET登录页 (https://www.gying.net/user/login/) → 获取PHPSESSID
// 2. POST登录 (https://www.gying.net/user/login) → 获取BT_auth、BT_cookietime等认证cookies
// 3. GET详情页 (https://www.gying.net/mv/wkMn) → 触发防爬cookies (vrg_sc、vrg_go等)
// 1. GET登录页 (https://www.gying.net/user/login/) → 获取PHPSESSID
// 2. POST登录 (https://www.gying.net/user/login) → 获取BT_auth、BT_cookietime等认证cookies
// 3. GET详情页 (https://www.gying.net/mv/wkMn) → 触发防爬cookies (vrg_sc、vrg_go等)
//
// 返回: (*cloudscraper.Scraper, cookie字符串, error)
func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper, string, error) {
@@ -1134,9 +1133,9 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
// 关键配置禁用403自动刷新,防止cookie被清空
scraper, err := cloudscraper.New(
cloudscraper.WithSessionConfig(
false, // refreshOn403 = false禁用403时自动刷新重要
365*24*time.Hour, // interval = 1年基本不刷新
0, // maxRetries = 0
false, // refreshOn403 = false禁用403时自动刷新重要
365*24*time.Hour, // interval = 1年基本不刷新
0, // maxRetries = 0
),
)
if err != nil {
@@ -1152,7 +1151,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
// 创建cookieMap用于收集所有cookies
cookieMap := make(map[string]string)
// ========== 步骤1: GET登录页 (获取初始PHPSESSID) ==========
loginPageURL := "https://www.gying.net/user/login/"
if DebugLog {
@@ -1172,7 +1171,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
if DebugLog {
fmt.Printf("[Gying] 登录页面状态码: %d\n", getResp.StatusCode)
}
// 从登录页响应中收集cookies
for _, setCookie := range getResp.Header["Set-Cookie"] {
parts := strings.Split(setCookie, ";")
@@ -1217,7 +1216,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
if DebugLog {
fmt.Printf("[Gying] 响应状态码: %d\n", resp.StatusCode)
}
// 从POST登录响应中收集cookies
for _, setCookie := range resp.Header["Set-Cookie"] {
parts := strings.Split(setCookie, ";")
@@ -1267,7 +1266,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
// 检查登录结果兼容多种类型int、float64、json.Number、string
var codeValue int
codeInterface := loginResp["code"]
switch v := codeInterface.(type) {
case int:
codeValue = v
@@ -1303,16 +1302,16 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
if DebugLog {
fmt.Printf("[Gying] 步骤3: GET详情页收集完整Cookie\n")
}
detailResp, err := scraper.Get("https://www.gying.net/mv/wkMn")
if err == nil {
defer detailResp.Body.Close()
ioutil.ReadAll(detailResp.Body)
if DebugLog {
fmt.Printf("[Gying] 详情页状态码: %d\n", detailResp.StatusCode)
}
// 从详情页响应中收集cookies
for _, setCookie := range detailResp.Header["Set-Cookie"] {
parts := strings.Split(setCookie, ";")
@@ -1333,14 +1332,14 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
}
}
}
// 构建cookie字符串
var cookieParts []string
for name, value := range cookieMap {
cookieParts = append(cookieParts, fmt.Sprintf("%s=%s", name, value))
}
cookieStr := strings.Join(cookieParts, "; ")
if DebugLog {
fmt.Printf("[Gying] ✅ 登录成功!提取到 %d 个Cookie\n", len(cookieMap))
fmt.Printf("[Gying] Cookie字符串长度: %d\n", len(cookieStr))
@@ -1373,7 +1372,7 @@ func (p *GyingPlugin) reloginUser(user *User) error {
if DebugLog {
fmt.Printf("[Gying] 🔄 开始重新登录用户: %s\n", user.UsernameMasked)
}
// 解密密码
password, err := p.decryptPassword(user.EncryptedPassword)
if err != nil {
@@ -1382,7 +1381,7 @@ func (p *GyingPlugin) reloginUser(user *User) error {
}
return fmt.Errorf("解密密码失败: %w", err)
}
// 执行登录
scraper, cookie, err := p.doLogin(user.Username, password)
if err != nil {
@@ -1391,26 +1390,26 @@ func (p *GyingPlugin) reloginUser(user *User) error {
}
return fmt.Errorf("重新登录失败: %w", err)
}
// 更新scraper实例
p.scrapers.Store(user.Hash, scraper)
// 更新用户信息
user.Cookie = cookie
user.LoginAt = time.Now()
user.ExpireAt = time.Now().AddDate(0, 4, 0)
user.Status = "active"
if err := p.saveUser(user); err != nil {
if DebugLog {
fmt.Printf("[Gying] ⚠️ 保存用户失败: %v\n", err)
}
}
if DebugLog {
fmt.Printf("[Gying] ✅ 用户 %s 重新登录成功\n", user.UsernameMasked)
}
return nil
}
@@ -1430,18 +1429,18 @@ func (p *GyingPlugin) executeSearchTasks(users []*User, keyword string) []model.
// 获取用户的scraper实例
scraperVal, exists := p.scrapers.Load(u.Hash)
var scraper *cloudscraper.Scraper
if !exists {
if DebugLog {
fmt.Printf("[Gying] 用户 %s 没有scraper实例尝试使用已保存的cookie创建\n", u.UsernameMasked)
}
// 为用户创建新的cloudscraper实例禁用403自动刷新
newScraper, err := cloudscraper.New(
cloudscraper.WithSessionConfig(
false, // refreshOn403 = false
365*24*time.Hour, // interval = 1年
0, // maxRetries = 0
false, // refreshOn403 = false
365*24*time.Hour, // interval = 1年
0, // maxRetries = 0
),
)
if err != nil {
@@ -1450,11 +1449,11 @@ func (p *GyingPlugin) executeSearchTasks(users []*User, keyword string) []model.
}
return
}
// 存储新创建的scraper实例
p.scrapers.Store(u.Hash, newScraper)
scraper = newScraper
if DebugLog {
fmt.Printf("[Gying] 已为用户 %s 创建新的scraper实例已禁用403刷新\n", u.UsernameMasked)
}
@@ -1492,13 +1491,13 @@ func (p *GyingPlugin) executeSearchTasks(users []*User, keyword string) []model.
// searchWithScraperWithRetry 使用scraper搜索带403自动重新登录重试
func (p *GyingPlugin) searchWithScraperWithRetry(keyword string, scraper *cloudscraper.Scraper, user *User) ([]model.SearchResult, error) {
results, err := p.searchWithScraper(keyword, scraper)
// 检测是否为403错误
if err != nil && strings.Contains(err.Error(), "403") {
if DebugLog {
fmt.Printf("[Gying] ⚠️ 检测到403错误尝试重新登录用户 %s\n", user.UsernameMasked)
}
// 尝试重新登录
if reloginErr := p.reloginUser(user); reloginErr != nil {
if DebugLog {
@@ -1506,18 +1505,18 @@ func (p *GyingPlugin) searchWithScraperWithRetry(keyword string, scraper *clouds
}
return nil, fmt.Errorf("403错误且重新登录失败: %w", reloginErr)
}
// 获取新的scraper实例
scraperVal, exists := p.scrapers.Load(user.Hash)
if !exists {
return nil, fmt.Errorf("重新登录后未找到scraper实例")
}
newScraper, ok := scraperVal.(*cloudscraper.Scraper)
if !ok || newScraper == nil {
return nil, fmt.Errorf("重新登录后scraper实例无效")
}
// 使用新scraper重试搜索
if DebugLog {
fmt.Printf("[Gying] 🔄 使用新登录状态重试搜索\n")
@@ -1527,7 +1526,7 @@ func (p *GyingPlugin) searchWithScraperWithRetry(keyword string, scraper *clouds
return nil, fmt.Errorf("重新登录后搜索仍然失败: %w", err)
}
}
return results, err
}
@@ -1540,7 +1539,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
// 1. 使用cloudscraper请求搜索页面
searchURL := fmt.Sprintf("https://www.gying.net/s/1---1/%s", url.QueryEscape(keyword))
if DebugLog {
fmt.Printf("[Gying] 搜索URL: %s\n", searchURL)
fmt.Printf("[Gying] 使用cloudscraper发送请求\n")
@@ -1558,7 +1557,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
if DebugLog {
fmt.Printf("[Gying] 搜索响应状态码: %d\n", resp.StatusCode)
}
// 读取响应body
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
@@ -1579,7 +1578,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
fmt.Printf("[Gying] 响应预览: %s\n", preview)
}
}
// 检查403错误
if resp.StatusCode == 403 {
if DebugLog {
@@ -1598,7 +1597,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
// 2. 提取 _obj.search JSON
re := regexp.MustCompile(`_obj\.search=(\{.*?\});`)
matches := re.FindSubmatch(body)
if DebugLog {
fmt.Printf("[Gying] 正则匹配结果: 找到 %d 个匹配\n", len(matches))
}
@@ -1646,18 +1645,25 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
}
// 3. 并发请求详情接口
results := p.fetchAllDetails(&searchData, scraper)
results, err := p.fetchAllDetails(&searchData, scraper)
if err != nil {
if DebugLog {
fmt.Printf("[Gying] fetchAllDetails 失败: %v\n", err)
fmt.Printf("[Gying] ---------- searchWithScraper 结束 ----------\n")
}
return nil, err
}
if DebugLog {
fmt.Printf("[Gying] fetchAllDetails 返回 %d 条结果\n", len(results))
fmt.Printf("[Gying] ---------- searchWithCookie 结束 ----------\n")
fmt.Printf("[Gying] ---------- searchWithScraper 结束 ----------\n")
}
return results, nil
}
// fetchAllDetails 并发获取所有详情
func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscraper.Scraper) []model.SearchResult {
func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscraper.Scraper) ([]model.SearchResult, error) {
if DebugLog {
fmt.Printf("[Gying] >>> fetchAllDetails 开始\n")
fmt.Printf("[Gying] 需要获取 %d 个详情\n", len(searchData.L.I))
@@ -1668,9 +1674,11 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
var wg sync.WaitGroup
semaphore := make(chan struct{}, MaxConcurrentDetails)
errChan := make(chan error, 1) // 用于接收403错误
successCount := 0
failCount := 0
has403 := false
for i := 0; i < len(searchData.L.I); i++ {
wg.Add(1)
@@ -1680,8 +1688,16 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
semaphore <- struct{}{}
defer func() { <-semaphore }()
// 检查是否已经遇到403错误
mu.Lock()
if has403 {
mu.Unlock()
return
}
mu.Unlock()
if DebugLog {
fmt.Printf("[Gying] [%d/%d] 获取详情: ID=%s, Type=%s\n",
fmt.Printf("[Gying] [%d/%d] 获取详情: ID=%s, Type=%s\n",
index+1, len(searchData.L.I), searchData.L.I[index], searchData.L.D[index])
}
@@ -1690,6 +1706,20 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
if DebugLog {
fmt.Printf("[Gying] [%d/%d] ❌ 获取详情失败: %v\n", index+1, len(searchData.L.I), err)
}
// 检查是否是403错误
if strings.Contains(err.Error(), "403") {
mu.Lock()
if !has403 {
has403 = true
select {
case errChan <- err:
default:
}
}
mu.Unlock()
}
mu.Lock()
failCount++
mu.Unlock()
@@ -1699,7 +1729,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
result := p.buildResult(detail, searchData, index)
if result.Title != "" && len(result.Links) > 0 {
if DebugLog {
fmt.Printf("[Gying] [%d/%d] ✅ 成功: %s (%d个链接)\n",
fmt.Printf("[Gying] [%d/%d] ✅ 成功: %s (%d个链接)\n",
index+1, len(searchData.L.I), result.Title, len(result.Links))
}
mu.Lock()
@@ -1708,7 +1738,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
mu.Unlock()
} else {
if DebugLog {
fmt.Printf("[Gying] [%d/%d] ⚠️ 跳过: 标题或链接为空 (标题:%s, 链接数:%d)\n",
fmt.Printf("[Gying] [%d/%d] ⚠️ 跳过: 标题或链接为空 (标题:%s, 链接数:%d)\n",
index+1, len(searchData.L.I), result.Title, len(result.Links))
}
}
@@ -1717,18 +1747,28 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
wg.Wait()
// 检查是否有403错误
select {
case err := <-errChan:
if DebugLog {
fmt.Printf("[Gying] <<< fetchAllDetails 检测到403错误需要重新登录\n")
}
return nil, err
default:
}
if DebugLog {
fmt.Printf("[Gying] <<< fetchAllDetails 完成: 成功=%d, 失败=%d, 总计=%d\n",
fmt.Printf("[Gying] <<< fetchAllDetails 完成: 成功=%d, 失败=%d, 总计=%d\n",
successCount, failCount, len(searchData.L.I))
}
return results
return results, nil
}
// fetchDetail 获取详情
func (p *GyingPlugin) fetchDetail(resourceID, resourceType string, scraper *cloudscraper.Scraper) (*DetailData, error) {
detailURL := fmt.Sprintf("https://www.gying.net/res/downurl/%s/%s", resourceType, resourceID)
if DebugLog {
fmt.Printf("[Gying] fetchDetail: %s\n", detailURL)
}
@@ -1793,6 +1833,14 @@ func (p *GyingPlugin) fetchDetail(resourceID, resourceType string, scraper *clou
fmt.Printf("[Gying] 详情Code: %d, 网盘链接数: %d\n", detail.Code, len(detail.Panlist.URL))
}
// 检查JSON响应中的code字段关键
if detail.Code == 403 {
if DebugLog {
fmt.Printf("[Gying] ❌ 详情接口返回Code=403 - 登录状态可能已失效\n")
}
return nil, fmt.Errorf("Detail API returned code 403 - authentication may have expired")
}
return &detail, nil
}
@@ -1845,7 +1893,7 @@ func (p *GyingPlugin) extractPanLinks(detail *DetailData) []model.Link {
for i := 0; i < len(detail.Panlist.URL); i++ {
linkURL := strings.TrimSpace(detail.Panlist.URL[i])
// 去除URL中的访问码标记
linkURL = regexp.MustCompile(`(访问码:.*?`).ReplaceAllString(linkURL, "")
linkURL = regexp.MustCompile(`\(访问码:.*?\)`).ReplaceAllString(linkURL, "")
@@ -1867,7 +1915,7 @@ func (p *GyingPlugin) extractPanLinks(detail *DetailData) []model.Link {
if i < len(detail.Panlist.P) && detail.Panlist.P[i] != "" {
password = detail.Panlist.P[i]
}
// 从URL提取提取码优先
if urlPwd := p.extractPasswordFromURL(linkURL); urlPwd != "" {
password = urlPwd
@@ -1916,7 +1964,7 @@ func (p *GyingPlugin) extractPasswordFromURL(linkURL string) string {
return matches[1]
}
}
// 115网盘: ?password=xxxx
if strings.Contains(linkURL, "?password=") {
re := regexp.MustCompile(`\?password=([a-zA-Z0-9]+)`)
@@ -2109,3 +2157,4 @@ func (p *GyingPlugin) markInactiveUsers() int {
return markedCount
}