Optimize gying exact-match search

Author: www.xueximeng.com
Date: 2025-10-31 10:39:12 +08:00
parent a4acd8f9d6
commit cbdfc65d2f


@@ -27,15 +27,15 @@ import (
"pansou/model"
"pansou/plugin"
"pansou/util/json"
cloudscraper "github.com/Advik-B/cloudscraper/lib"
)
// Plugin configuration parameters
const (
MaxConcurrentUsers = 10 // maximum number of user accounts used per search
MaxConcurrentDetails = 50 // maximum number of concurrent detail requests
DebugLog = false // debug log switch; set to true when troubleshooting
)
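Note: these constants cap fan-out, but the enforcement is not visible in the hunks below (fetchAllDetails launches one goroutine per item). A hedged sketch of the usual Go idiom for honoring a cap like MaxConcurrentDetails, a buffered-channel semaphore, not necessarily the plugin's actual wiring:

package main

import (
	"fmt"
	"sync"
)

const MaxConcurrentDetails = 50 // mirrors the plugin constant above

func main() {
	sem := make(chan struct{}, MaxConcurrentDetails)
	var wg sync.WaitGroup
	for i := 0; i < 200; i++ {
		wg.Add(1)
		go func(id int) {
			defer wg.Done()
			sem <- struct{}{}                      // acquire one of the 50 slots
			defer func() { <-sem }()               // release the slot on return
			fmt.Printf("fetching detail %d\n", id) // stand-in for the real detail request
		}(i)
	}
	wg.Wait()
}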
// Default account configuration (more accounts can be added via the web UI)
@@ -57,9 +57,9 @@ func init() {
if cachePath == "" {
cachePath = "./cache"
}
StorageDir = filepath.Join(cachePath, "gying_users")
if err := os.MkdirAll(StorageDir, 0755); err != nil {
fmt.Printf("⚠️ 警告: 无法创建Gying存储目录 %s: %v\n", StorageDir, err)
} else {
@@ -382,9 +382,9 @@ const HTMLTemplate = `<!DOCTYPE html>
// GyingPlugin plugin structure
type GyingPlugin struct {
*plugin.BaseAsyncPlugin
users sync.Map // in-memory cache: hash -> *User
scrapers sync.Map // cloudscraper instance cache: hash -> *cloudscraper.Scraper
mu sync.RWMutex
searchCache sync.Map // plugin-level cache: keyword -> model.PluginSearchResult
}
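sync.Map stores untyped interface{} values, which is why the hunks below type-assert with value.(*User) after every Load and Range. A minimal standalone illustration:

package main

import (
	"fmt"
	"sync"
)

type User struct{ UsernameMasked string } // trimmed stand-in for the plugin's User

func main() {
	var users sync.Map
	users.Store("hash123", &User{UsernameMasked: "de***mo"})
	// Load returns interface{}, hence the value.(*User) assertions seen below.
	if v, ok := users.Load("hash123"); ok {
		fmt.Println(v.(*User).UsernameMasked)
	}
	users.Range(func(key, value interface{}) bool {
		fmt.Println(key, value.(*User).UsernameMasked)
		return true // returning false would stop the iteration
	})
}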
@@ -420,8 +420,8 @@ type SearchData struct {
// DetailData is the JSON structure returned by the detail endpoint
type DetailData struct {
Code int `json:"code"`
WP bool `json:"wp"`
Panlist struct {
ID []string `json:"id"`
Name []string `json:"name"`
@@ -457,7 +457,7 @@ func init() {
// Start the periodic cleanup task
go p.startCleanupTask()
// Start the session keep-alive task to prevent session timeouts
go p.startSessionKeepAlive()
@@ -471,7 +471,7 @@ func (p *GyingPlugin) RegisterWebRoutes(router *gin.RouterGroup) {
gying := router.Group("/gying")
gying.GET("/:param", p.handleManagePage)
gying.POST("/:param", p.handleManagePagePOST)
fmt.Printf("[Gying] Web路由已注册: /gying/:param\n")
}
@@ -580,7 +580,7 @@ func (p *GyingPlugin) loadAllUsers() {
// scraper instances are restored in initDefaultAccounts by logging in again
p.users.Store(user.Hash, &user)
count++
if DebugLog {
hasPassword := "无"
if user.EncryptedPassword != "" {
@@ -596,8 +596,8 @@ func (p *GyingPlugin) loadAllUsers() {
// initDefaultAccounts initializes all accounts (runs asynchronously, does not block startup)
// Covers: 1. default accounts configured in code via DefaultAccounts; 2. users loaded from file (re-login using the encrypted password)
func (p *GyingPlugin) initDefaultAccounts() {
fmt.Printf("[Gying] ========== 异步初始化所有账户 ==========\n")
// fmt.Printf("[Gying] ========== 异步初始化所有账户 ==========\n")
// Step 1: handle the default accounts configured in DefaultAccounts
for i, account := range DefaultAccounts {
if DebugLog {
@@ -606,7 +606,7 @@ func (p *GyingPlugin) initDefaultAccounts() {
p.initOrRestoreUser(account.Username, account.Password, "default")
}
// Step 2: walk all loaded users and restore those missing a scraper
var usersToRestore []*User
p.users.Range(func(key, value interface{}) bool {
@@ -618,32 +618,32 @@ func (p *GyingPlugin) initDefaultAccounts() {
}
return true
})
if len(usersToRestore) > 0 {
fmt.Printf("[Gying] 发现 %d 个需要恢复的用户(使用加密密码重新登录)\n", len(usersToRestore))
for i, user := range usersToRestore {
if DebugLog {
fmt.Printf("[Gying] [恢复用户 %d/%d] 处理: %s\n", i+1, len(usersToRestore), user.UsernameMasked)
}
// Decrypt the password
password, err := p.decryptPassword(user.EncryptedPassword)
if err != nil {
fmt.Printf("[Gying] ❌ 用户 %s 解密密码失败: %v\n", user.UsernameMasked, err)
continue
}
p.initOrRestoreUser(user.Username, password, "restore")
}
}
fmt.Printf("[Gying] ========== 所有账户初始化完成 ==========\n")
// fmt.Printf("[Gying] ========== 所有账户初始化完成 ==========\n")
}
// initOrRestoreUser initializes or restores a single user (logs in and saves)
func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
hash := p.generateHash(username)
// Check whether a scraper already exists
_, scraperExists := p.scrapers.Load(hash)
if scraperExists {
@@ -652,7 +652,7 @@ func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
}
return
}
// Log in
if DebugLog {
fmt.Printf("[Gying] 开始登录账户: %s\n", username)
@@ -673,7 +673,7 @@ func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
fmt.Printf("[Gying] ❌ 加密密码失败: %v\n", err)
return
}
// Save the user
user := &User{
Hash: hash,
@@ -687,10 +687,10 @@ func (p *GyingPlugin) initOrRestoreUser(username, password, source string) {
ExpireAt: time.Now().AddDate(0, 4, 0), // 121-day validity period
LastAccessAt: time.Now(),
}
// Keep the scraper instance in memory
p.scrapers.Store(hash, scraper)
if err := p.saveUser(user); err != nil {
fmt.Printf("[Gying] ❌ 保存账户失败: %v\n", err)
return
@@ -734,7 +734,7 @@ func (p *GyingPlugin) deleteUser(hash string) error {
// getActiveUsers returns the currently valid users
func (p *GyingPlugin) getActiveUsers() []*User {
var users []*User
p.users.Range(func(key, value interface{}) bool {
user := value.(*User)
if user.Status == "active" && user.Cookie != "" {
@@ -742,7 +742,7 @@ func (p *GyingPlugin) getActiveUsers() []*User {
}
return true
})
return users
}
@@ -823,13 +823,13 @@ func (p *GyingPlugin) handleGetStatus(c *gin.Context, hash string) {
}
respondSuccess(c, "获取成功", gin.H{
"hash": hash,
"logged_in": loggedIn,
"status": user.Status,
"username_masked": user.UsernameMasked,
"login_time": user.LoginAt.Format("2006-01-02 15:04:05"),
"expire_time": user.ExpireAt.Format("2006-01-02 15:04:05"),
"expires_in_days": expiresInDays,
"hash": hash,
"logged_in": loggedIn,
"status": user.Status,
"username_masked": user.UsernameMasked,
"login_time": user.LoginAt.Format("2006-01-02 15:04:05"),
"expire_time": user.ExpireAt.Format("2006-01-02 15:04:05"),
"expires_in_days": expiresInDays,
})
}
@@ -859,7 +859,7 @@ func (p *GyingPlugin) handleLogin(c *gin.Context, hash string, reqData map[strin
respondError(c, "加密密码失败: "+err.Error())
return
}
// Save the user
user := &User{
Hash: hash,
@@ -872,7 +872,7 @@ func (p *GyingPlugin) handleLogin(c *gin.Context, hash string, reqData map[strin
ExpireAt: time.Now().AddDate(0, 4, 0), // 121 days
LastAccessAt: time.Now(),
}
if _, exists := p.getUserByHash(hash); !exists {
user.CreatedAt = time.Now()
}
@@ -929,13 +929,13 @@ func (p *GyingPlugin) handleTestSearch(c *gin.Context, hash string, reqData map[
respondError(c, "用户scraper实例不存在请重新登录")
return
}
scraper, ok := scraperVal.(*cloudscraper.Scraper)
if !ok || scraper == nil {
respondError(c, "scraper实例无效请重新登录")
return
}
// Run the search (with automatic re-login on 403)
results, err := p.searchWithScraperWithRetry(keyword, scraper, user)
if err != nil {
@@ -980,27 +980,27 @@ func (p *GyingPlugin) handleTestSearch(c *gin.Context, hash string, reqData map[
func (p *GyingPlugin) encryptPassword(password string) (string, error) {
// Use a fixed key (in production this could come from config or an environment variable)
key := []byte("gying-secret-key-32bytes-long!!!") // 32-byte key for AES-256
block, err := aes.NewCipher(key)
if err != nil {
return "", err
}
// Create GCM mode
gcm, err := cipher.NewGCM(block)
if err != nil {
return "", err
}
// Generate a random nonce
nonce := make([]byte, gcm.NonceSize())
if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
return "", err
}
// Encrypt
ciphertext := gcm.Seal(nonce, nonce, []byte(password), nil)
// Return the base64-encoded ciphertext
return base64.StdEncoding.EncodeToString(ciphertext), nil
}
@@ -1009,34 +1009,34 @@ func (p *GyingPlugin) encryptPassword(password string) (string, error) {
func (p *GyingPlugin) decryptPassword(encrypted string) (string, error) {
// Use the same key as for encryption
key := []byte("gying-secret-key-32bytes-long!!!")
// base64-decode
ciphertext, err := base64.StdEncoding.DecodeString(encrypted)
if err != nil {
return "", err
}
block, err := aes.NewCipher(key)
if err != nil {
return "", err
}
gcm, err := cipher.NewGCM(block)
if err != nil {
return "", err
}
nonceSize := gcm.NonceSize()
if len(ciphertext) < nonceSize {
return "", fmt.Errorf("ciphertext too short")
}
nonce, ciphertext := ciphertext[:nonceSize], ciphertext[nonceSize:]
plaintext, err := gcm.Open(nil, nonce, ciphertext, nil)
if err != nil {
return "", err
}
return string(plaintext), nil
}
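Taken together, the two functions implement a nonce-prefixed AES-256-GCM round trip: Seal prepends the random nonce to the ciphertext, and decryption splits it back off. A self-contained sketch of the same scheme (plain functions instead of the plugin's methods):

package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"encoding/base64"
	"fmt"
	"io"
)

var key = []byte("gying-secret-key-32bytes-long!!!") // 32 bytes -> AES-256

func encrypt(plain string) (string, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return "", err
	}
	// Seal appends the ciphertext to the nonce, so the nonce travels with the message.
	return base64.StdEncoding.EncodeToString(gcm.Seal(nonce, nonce, []byte(plain), nil)), nil
}

func decrypt(encoded string) (string, error) {
	raw, err := base64.StdEncoding.DecodeString(encoded)
	if err != nil {
		return "", err
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	if len(raw) < gcm.NonceSize() {
		return "", fmt.Errorf("ciphertext too short")
	}
	nonce, ct := raw[:gcm.NonceSize()], raw[gcm.NonceSize():]
	plain, err := gcm.Open(nil, nonce, ct, nil)
	return string(plain), err
}

func main() {
	enc, _ := encrypt("hunter2")
	dec, _ := decrypt(enc)
	fmt.Println(enc, dec) // dec == "hunter2"
}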
@@ -1049,64 +1049,64 @@ func (p *GyingPlugin) createScraperWithCookies(cookieStr string) (*cloudscraper.
// Create a cloudscraper instance, configured so cookies are protected from being refreshed away
scraper, err := cloudscraper.New(
cloudscraper.WithSessionConfig(
false, // refreshOn403 = false: disable automatic refresh on 403
365*24*time.Hour, // interval = 1 year: effectively never refresh
0, // maxRetries = 0
),
)
if err != nil {
return nil, fmt.Errorf("创建cloudscraper失败: %w", err)
}
// If there are saved cookies, use reflection to set them on the scraper's internal http.Client
if cookieStr != "" {
cookies := parseCookieString(cookieStr)
if DebugLog {
fmt.Printf("[Gying] 正在恢复 %d 个cookie到scraper实例\n", len(cookies))
}
// Use reflection to access the scraper's unexported client field
scraperValue := reflect.ValueOf(scraper).Elem()
clientField := scraperValue.FieldByName("client")
if clientField.IsValid() && !clientField.IsNil() {
// Access the client via reflection (Elem() is needed because it is a pointer)
clientValue := reflect.NewAt(clientField.Type(), unsafe.Pointer(clientField.UnsafeAddr())).Elem()
client, ok := clientValue.Interface().(*http.Client)
if ok && client != nil && client.Jar != nil {
// Set the cookies into the cookiejar
// Note: the URL and cookie attributes must be correct
gyingURL, _ := url.Parse("https://www.gying.net")
var httpCookies []*http.Cookie
for name, value := range cookies {
cookie := &http.Cookie{
Name: name,
Value: value,
// Leave Domain and Path unset; the cookiejar derives them from the URL
// (cookiejar.SetCookies sets the correct Domain and Path based on the URL provided)
}
httpCookies = append(httpCookies, cookie)
if DebugLog {
fmt.Printf("[Gying] 准备恢复Cookie: %s=%s\n",
fmt.Printf("[Gying] 准备恢复Cookie: %s=%s\n",
cookie.Name, cookie.Value[:min(10, len(cookie.Value))])
}
}
client.Jar.SetCookies(gyingURL, httpCookies)
// Verify the cookies were set correctly
if DebugLog {
storedCookies := client.Jar.Cookies(gyingURL)
fmt.Printf("[Gying] ✅ 成功恢复 %d 个cookie到scraper的cookiejar\n", len(cookies))
fmt.Printf("[Gying] 验证: cookiejar中现有 %d 个cookie\n", len(storedCookies))
// Print each stored cookie for debugging
for i, c := range storedCookies {
fmt.Printf("[Gying] 设置后Cookie[%d]: %s=%s (Domain:%s, Path:%s)\n",
fmt.Printf("[Gying] 设置后Cookie[%d]: %s=%s (Domain:%s, Path:%s)\n",
i, c.Name, c.Value[:min(10, len(c.Value))], c.Domain, c.Path)
}
}
@@ -1121,7 +1121,7 @@ func (p *GyingPlugin) createScraperWithCookies(cookieStr string) (*cloudscraper.
}
}
}
return scraper, nil
}
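The restore path above deliberately leaves Domain and Path empty and lets net/http/cookiejar derive them from the URL passed to SetCookies. A standalone check of that behavior:

package main

import (
	"fmt"
	"net/http"
	"net/http/cookiejar"
	"net/url"
)

func main() {
	jar, _ := cookiejar.New(nil)
	u, _ := url.Parse("https://www.gying.net")
	// No Domain/Path set: the jar scopes the cookie to the given URL itself.
	jar.SetCookies(u, []*http.Cookie{{Name: "BT_auth", Value: "example"}})
	for _, c := range jar.Cookies(u) {
		fmt.Printf("%s=%s\n", c.Name, c.Value) // BT_auth=example
	}
}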
@@ -1129,7 +1129,7 @@ func (p *GyingPlugin) createScraperWithCookies(cookieStr string) (*cloudscraper.
func parseCookieString(cookieStr string) map[string]string {
cookies := make(map[string]string)
parts := strings.Split(cookieStr, ";")
for _, part := range parts {
part = strings.TrimSpace(part)
if idx := strings.Index(part, "="); idx > 0 {
@@ -1138,18 +1138,18 @@ func parseCookieString(cookieStr string) map[string]string {
cookies[name] = value
}
}
return cookies
}
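Usage sketch for parseCookieString, with the same logic reproduced standalone:

package main

import (
	"fmt"
	"strings"
)

// Same logic as parseCookieString above: split on ';', then on the first '='.
func parseCookieString(cookieStr string) map[string]string {
	cookies := make(map[string]string)
	for _, part := range strings.Split(cookieStr, ";") {
		part = strings.TrimSpace(part)
		if idx := strings.Index(part, "="); idx > 0 {
			cookies[part[:idx]] = part[idx+1:]
		}
	}
	return cookies
}

func main() {
	m := parseCookieString("PHPSESSID=abc123; BT_auth=tok; vrg_sc=1")
	fmt.Println(m["PHPSESSID"], m["BT_auth"], m["vrg_sc"]) // abc123 tok 1
}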
// ============ Login logic ============
// doLogin performs the login, returning a scraper instance and the cookie string
//
// The login flow has 3 steps:
// 1. GET the login page (https://www.gying.net/user/login/) → obtain PHPSESSID
// 2. POST the login form (https://www.gying.net/user/login) → obtain BT_auth, BT_cookietime and other auth cookies
// 3. GET a detail page (https://www.gying.net/mv/wkMn) → trigger the anti-scraping cookies (vrg_sc, vrg_go, etc.)
//
// Returns: (*cloudscraper.Scraper, cookie string, error)
func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper, string, error) {
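Each of the three steps harvests cookies from the raw Set-Cookie response headers. The collection loops are only partly visible in the hunks below, so the parsing here is an assumption consistent with what is shown (split on ';', keep the leading name=value pair):

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// collect pulls name=value out of each Set-Cookie header into cookieMap.
// Assumption: mirrors the collection loops in the login steps, whose bodies are partly elided.
func collect(h http.Header, cookieMap map[string]string) {
	for _, setCookie := range h["Set-Cookie"] {
		parts := strings.Split(setCookie, ";")
		nv := strings.SplitN(strings.TrimSpace(parts[0]), "=", 2)
		if len(nv) == 2 {
			cookieMap[nv[0]] = nv[1]
		}
	}
}

func main() {
	h := http.Header{}
	h.Add("Set-Cookie", "PHPSESSID=abc123; Path=/; HttpOnly")
	h.Add("Set-Cookie", "BT_auth=token; Path=/")
	m := map[string]string{}
	collect(h, m)
	fmt.Println(m) // map[BT_auth:token PHPSESSID:abc123]
}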
@@ -1163,9 +1163,9 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
// Key configuration: disable automatic refresh on 403 to keep cookies from being wiped
scraper, err := cloudscraper.New(
cloudscraper.WithSessionConfig(
false, // refreshOn403 = false: disable automatic refresh on 403 (important)
365*24*time.Hour, // interval = 1 year: effectively never refresh
0, // maxRetries = 0
),
)
if err != nil {
@@ -1181,7 +1181,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
// Create a cookieMap to collect all cookies
cookieMap := make(map[string]string)
// ========== Step 1: GET the login page (obtain the initial PHPSESSID) ==========
loginPageURL := "https://www.gying.net/user/login/"
if DebugLog {
@@ -1201,7 +1201,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
if DebugLog {
fmt.Printf("[Gying] 登录页面状态码: %d\n", getResp.StatusCode)
}
// Collect cookies from the login-page response
for _, setCookie := range getResp.Header["Set-Cookie"] {
parts := strings.Split(setCookie, ";")
@@ -1246,7 +1246,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
if DebugLog {
fmt.Printf("[Gying] 响应状态码: %d\n", resp.StatusCode)
}
// Collect cookies from the login POST response
for _, setCookie := range resp.Header["Set-Cookie"] {
parts := strings.Split(setCookie, ";")
@@ -1296,7 +1296,7 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
// Check the login result (tolerates several types: int, float64, json.Number, string)
var codeValue int
codeInterface := loginResp["code"]
switch v := codeInterface.(type) {
case int:
codeValue = v
@@ -1332,16 +1332,16 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
if DebugLog {
fmt.Printf("[Gying] 步骤3: GET详情页收集完整Cookie\n")
}
detailResp, err := scraper.Get("https://www.gying.net/mv/wkMn")
if err == nil {
defer detailResp.Body.Close()
ioutil.ReadAll(detailResp.Body)
if DebugLog {
fmt.Printf("[Gying] 详情页状态码: %d\n", detailResp.StatusCode)
}
// Collect cookies from the detail-page response
for _, setCookie := range detailResp.Header["Set-Cookie"] {
parts := strings.Split(setCookie, ";")
@@ -1362,14 +1362,14 @@ func (p *GyingPlugin) doLogin(username, password string) (*cloudscraper.Scraper,
}
}
}
// Build the cookie string
var cookieParts []string
for name, value := range cookieMap {
cookieParts = append(cookieParts, fmt.Sprintf("%s=%s", name, value))
}
cookieStr := strings.Join(cookieParts, "; ")
if DebugLog {
fmt.Printf("[Gying] ✅ 登录成功!提取到 %d 个Cookie\n", len(cookieMap))
fmt.Printf("[Gying] Cookie字符串长度: %d\n", len(cookieStr))
@@ -1402,7 +1402,7 @@ func (p *GyingPlugin) reloginUser(user *User) error {
if DebugLog {
fmt.Printf("[Gying] 🔄 开始重新登录用户: %s\n", user.UsernameMasked)
}
// Decrypt the password
password, err := p.decryptPassword(user.EncryptedPassword)
if err != nil {
@@ -1411,7 +1411,7 @@ func (p *GyingPlugin) reloginUser(user *User) error {
}
return fmt.Errorf("解密密码失败: %w", err)
}
// Perform the login
scraper, cookie, err := p.doLogin(user.Username, password)
if err != nil {
@@ -1420,26 +1420,26 @@ func (p *GyingPlugin) reloginUser(user *User) error {
}
return fmt.Errorf("重新登录失败: %w", err)
}
// Update the scraper instance
p.scrapers.Store(user.Hash, scraper)
// Update the user record
user.Cookie = cookie
user.LoginAt = time.Now()
user.ExpireAt = time.Now().AddDate(0, 4, 0)
user.Status = "active"
if err := p.saveUser(user); err != nil {
if DebugLog {
fmt.Printf("[Gying] ⚠️ 保存用户失败: %v\n", err)
}
}
if DebugLog {
fmt.Printf("[Gying] ✅ 用户 %s 重新登录成功\n", user.UsernameMasked)
}
return nil
}
@@ -1459,12 +1459,12 @@ func (p *GyingPlugin) executeSearchTasks(users []*User, keyword string) []model.
// Get the user's scraper instance
scraperVal, exists := p.scrapers.Load(u.Hash)
var scraper *cloudscraper.Scraper
if !exists {
if DebugLog {
fmt.Printf("[Gying] 用户 %s 没有scraper实例尝试使用已保存的cookie创建\n", u.UsernameMasked)
}
// Create a scraper instance from the saved cookie (the key step)
newScraper, err := p.createScraperWithCookies(u.Cookie)
if err != nil {
@@ -1473,11 +1473,11 @@ func (p *GyingPlugin) executeSearchTasks(users []*User, keyword string) []model.
}
return
}
// Store the newly created scraper instance
p.scrapers.Store(u.Hash, newScraper)
scraper = newScraper
if DebugLog {
fmt.Printf("[Gying] 已为用户 %s 恢复scraper实例含cookie\n", u.UsernameMasked)
}
@@ -1515,13 +1515,13 @@ func (p *GyingPlugin) executeSearchTasks(users []*User, keyword string) []model.
// searchWithScraperWithRetry searches via the scraper, with automatic re-login and retry on 403
func (p *GyingPlugin) searchWithScraperWithRetry(keyword string, scraper *cloudscraper.Scraper, user *User) ([]model.SearchResult, error) {
results, err := p.searchWithScraper(keyword, scraper)
// Check whether the error is a 403
if err != nil && strings.Contains(err.Error(), "403") {
if DebugLog {
fmt.Printf("[Gying] ⚠️ 检测到403错误尝试重新登录用户 %s\n", user.UsernameMasked)
}
// Try to log in again
if reloginErr := p.reloginUser(user); reloginErr != nil {
if DebugLog {
@@ -1529,18 +1529,18 @@ func (p *GyingPlugin) searchWithScraperWithRetry(keyword string, scraper *clouds
}
return nil, fmt.Errorf("403错误且重新登录失败: %w", reloginErr)
}
// Fetch the new scraper instance
scraperVal, exists := p.scrapers.Load(user.Hash)
if !exists {
return nil, fmt.Errorf("重新登录后未找到scraper实例")
}
newScraper, ok := scraperVal.(*cloudscraper.Scraper)
if !ok || newScraper == nil {
return nil, fmt.Errorf("重新登录后scraper实例无效")
}
// Retry the search with the new scraper
if DebugLog {
fmt.Printf("[Gying] 🔄 使用新登录状态重试搜索\n")
@@ -1550,7 +1550,7 @@ func (p *GyingPlugin) searchWithScraperWithRetry(keyword string, scraper *clouds
return nil, fmt.Errorf("重新登录后搜索仍然失败: %w", err)
}
}
return results, err
}
@@ -1563,7 +1563,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
// 1. Request the search page via cloudscraper
searchURL := fmt.Sprintf("https://www.gying.net/s/1---1/%s", url.QueryEscape(keyword))
if DebugLog {
fmt.Printf("[Gying] 搜索URL: %s\n", searchURL)
fmt.Printf("[Gying] 使用cloudscraper发送请求\n")
@@ -1581,7 +1581,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
if DebugLog {
fmt.Printf("[Gying] 搜索响应状态码: %d\n", resp.StatusCode)
}
// Read the response body
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
@@ -1602,7 +1602,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
fmt.Printf("[Gying] 响应预览: %s\n", preview)
}
}
// Check for a 403 error
if resp.StatusCode == 403 {
if DebugLog {
@@ -1621,7 +1621,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
// 2. Extract the _obj.search JSON
re := regexp.MustCompile(`_obj\.search=(\{.*?\});`)
matches := re.FindSubmatch(body)
if DebugLog {
fmt.Printf("[Gying] 正则匹配结果: 找到 %d 个匹配\n", len(matches))
}
@@ -1679,9 +1679,9 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
fmt.Printf("[Gying] 防爬cookies刷新成功 (状态码: %d)\n", refreshResp.StatusCode)
}
}
// 4. Fetch all details concurrently
- results, err := p.fetchAllDetails(&searchData, scraper)
+ results, err := p.fetchAllDetails(&searchData, scraper, keyword)
if err != nil {
if DebugLog {
fmt.Printf("[Gying] fetchAllDetails 失败: %v\n", err)
@@ -1689,7 +1689,7 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
}
return nil, err
}
if DebugLog {
fmt.Printf("[Gying] fetchAllDetails 返回 %d 条结果\n", len(results))
fmt.Printf("[Gying] ---------- searchWithScraper 结束 ----------\n")
@@ -1699,10 +1699,10 @@ func (p *GyingPlugin) searchWithScraper(keyword string, scraper *cloudscraper.Sc
}
// fetchAllDetails fetches all details concurrently
- func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscraper.Scraper) ([]model.SearchResult, error) {
+ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscraper.Scraper, keyword string) ([]model.SearchResult, error) {
if DebugLog {
fmt.Printf("[Gying] >>> fetchAllDetails 开始\n")
fmt.Printf("[Gying] 需要获取 %d 个详情\n", len(searchData.L.I))
fmt.Printf("[Gying] 需要获取 %d 个详情,关键词: %s\n", len(searchData.L.I), keyword)
}
var results []model.SearchResult
@@ -1715,6 +1715,9 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
successCount := 0
failCount := 0
has403 := false
+ // Lowercase the keyword for case-insensitive matching
+ keywordLower := strings.ToLower(keyword)
for i := 0; i < len(searchData.L.I); i++ {
wg.Add(1)
@@ -1732,9 +1735,28 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
}
mu.Unlock()
+ // Check whether the title contains the search keyword
+ if index >= len(searchData.L.Title) {
+ if DebugLog {
+ fmt.Printf("[Gying] [%d/%d] ⏭️ 跳过: 索引超出标题数组范围\n",
+ index+1, len(searchData.L.I))
+ }
+ return
+ }
+ title := searchData.L.Title[index]
+ titleLower := strings.ToLower(title)
+ if !strings.Contains(titleLower, keywordLower) {
+ if DebugLog {
+ fmt.Printf("[Gying] [%d/%d] ⏭️ 跳过: 标题不包含关键词 '%s' (标题: %s)\n",
+ index+1, len(searchData.L.I), keyword, title)
+ }
+ return
+ }
if DebugLog {
fmt.Printf("[Gying] [%d/%d] 获取详情: ID=%s, Type=%s\n",
index+1, len(searchData.L.I), searchData.L.I[index], searchData.L.D[index])
fmt.Printf("[Gying] [%d/%d] 获取详情: ID=%s, Type=%s, 标题=%s\n",
index+1, len(searchData.L.I), searchData.L.I[index], searchData.L.D[index], title)
}
detail, err := p.fetchDetail(searchData.L.I[index], searchData.L.D[index], scraper)
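The block added above is the heart of this commit's exact-match search: lowercase both the title and the keyword, then require a substring hit, skipping the detail request entirely on a miss (which also saves a network round trip). Reduced to a standalone check:

package main

import (
	"fmt"
	"strings"
)

// matchesKeyword mirrors the new filter: lowercase both sides, then substring match.
func matchesKeyword(title, keyword string) bool {
	return strings.Contains(strings.ToLower(title), strings.ToLower(keyword))
}

func main() {
	for _, title := range []string{"The Wandering Earth II", "Wandering.Earth.2023", "Unrelated Item"} {
		fmt.Printf("%-25s -> %v\n", title, matchesKeyword(title, "wandering earth"))
	}
}

Note the strictness: a dotted release title misses a space-separated keyword, which is the trade-off of plain substring matching.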
@@ -1742,7 +1764,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
if DebugLog {
fmt.Printf("[Gying] [%d/%d] ❌ 获取详情失败: %v\n", index+1, len(searchData.L.I), err)
}
// Check whether the error is a 403
if strings.Contains(err.Error(), "403") {
mu.Lock()
@@ -1755,7 +1777,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
}
mu.Unlock()
}
mu.Lock()
failCount++
mu.Unlock()
@@ -1765,7 +1787,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
result := p.buildResult(detail, searchData, index)
if result.Title != "" && len(result.Links) > 0 {
if DebugLog {
fmt.Printf("[Gying] [%d/%d] ✅ 成功: %s (%d个链接)\n",
fmt.Printf("[Gying] [%d/%d] ✅ 成功: %s (%d个链接)\n",
index+1, len(searchData.L.I), result.Title, len(result.Links))
}
mu.Lock()
@@ -1774,7 +1796,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
mu.Unlock()
} else {
if DebugLog {
fmt.Printf("[Gying] [%d/%d] ⚠️ 跳过: 标题或链接为空 (标题:%s, 链接数:%d)\n",
fmt.Printf("[Gying] [%d/%d] ⚠️ 跳过: 标题或链接为空 (标题:%s, 链接数:%d)\n",
index+1, len(searchData.L.I), result.Title, len(result.Links))
}
}
@@ -1794,7 +1816,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
}
if DebugLog {
fmt.Printf("[Gying] <<< fetchAllDetails 完成: 成功=%d, 失败=%d, 总计=%d\n",
fmt.Printf("[Gying] <<< fetchAllDetails 完成: 成功=%d, 失败=%d, 总计=%d\n",
successCount, failCount, len(searchData.L.I))
}
@@ -1804,7 +1826,7 @@ func (p *GyingPlugin) fetchAllDetails(searchData *SearchData, scraper *cloudscra
// fetchDetail fetches a single detail record
func (p *GyingPlugin) fetchDetail(resourceID, resourceType string, scraper *cloudscraper.Scraper) (*DetailData, error) {
detailURL := fmt.Sprintf("https://www.gying.net/res/downurl/%s/%s", resourceType, resourceID)
if DebugLog {
fmt.Printf("[Gying] fetchDetail: %s\n", detailURL)
}
@@ -1929,7 +1951,7 @@ func (p *GyingPlugin) extractPanLinks(detail *DetailData) []model.Link {
for i := 0; i < len(detail.Panlist.URL); i++ {
linkURL := strings.TrimSpace(detail.Panlist.URL[i])
// Strip the access-code marker from the URL
- linkURL = regexp.MustCompile(`(访问码:.*?`).ReplaceAllString(linkURL, "")
+ linkURL = regexp.MustCompile(`\(访问码:.*?\)`).ReplaceAllString(linkURL, "")
@@ -1951,7 +1973,7 @@ func (p *GyingPlugin) extractPanLinks(detail *DetailData) []model.Link {
if i < len(detail.Panlist.P) && detail.Panlist.P[i] != "" {
password = detail.Panlist.P[i]
}
// An access code extracted from the URL takes priority
if urlPwd := p.extractPasswordFromURL(linkURL); urlPwd != "" {
password = urlPwd
@@ -1984,9 +2006,9 @@ func (p *GyingPlugin) determineLinkType(linkURL string) string {
return "tianyi"
case strings.Contains(linkURL, "115.com") || strings.Contains(linkURL, "115cdn.com") || strings.Contains(linkURL, "anxia.com"):
return "115"
case strings.Contains(linkURL, "123684.com") || strings.Contains(linkURL, "123685.com") ||
strings.Contains(linkURL, "123912.com") || strings.Contains(linkURL, "123pan.com") ||
strings.Contains(linkURL, "123pan.cn") || strings.Contains(linkURL, "123592.com"):
case strings.Contains(linkURL, "123684.com") || strings.Contains(linkURL, "123685.com") ||
strings.Contains(linkURL, "123912.com") || strings.Contains(linkURL, "123pan.com") ||
strings.Contains(linkURL, "123pan.cn") || strings.Contains(linkURL, "123592.com"):
return "123"
default:
return "others"
@@ -2002,7 +2024,7 @@ func (p *GyingPlugin) extractPasswordFromURL(linkURL string) string {
return matches[1]
}
}
// 115 drive: ?password=xxxx
if strings.Contains(linkURL, "?password=") {
re := regexp.MustCompile(`\?password=([a-zA-Z0-9]+)`)
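And a quick check of the ?password= extraction used above:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`\?password=([a-zA-Z0-9]+)`)
	if m := re.FindStringSubmatch("https://115.com/s/abc?password=q1w2"); m != nil {
		fmt.Println(m[1]) // q1w2
	}
}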
@@ -2149,10 +2171,10 @@ func decryptCookie(encrypted string) (string, error) {
func (p *GyingPlugin) startSessionKeepAlive() {
// Delay 3 minutes after first startup to avoid a burst of requests at launch
time.Sleep(3 * time.Minute)
// Run one keep-alive immediately
p.keepAllSessionsAlive()
// Then run keep-alive every 3 minutes
ticker := time.NewTicker(3 * time.Minute)
for range ticker.C {
@@ -2163,26 +2185,26 @@ func (p *GyingPlugin) startSessionKeepAlive() {
// keepAllSessionsAlive keeps every user's session alive
func (p *GyingPlugin) keepAllSessionsAlive() {
count := 0
p.users.Range(func(key, value interface{}) bool {
user := value.(*User)
// Only keep sessions alive for users in "active" status
if user.Status != "active" {
return true
}
// Get the scraper instance
scraperVal, exists := p.scrapers.Load(user.Hash)
if !exists {
return true
}
scraper, ok := scraperVal.(*cloudscraper.Scraper)
if !ok || scraper == nil {
return true
}
// Hit the home page to keep the session alive
go func(s *cloudscraper.Scraper, username string) {
resp, err := s.Get("https://www.gying.net/")
@@ -2193,11 +2215,11 @@ func (p *GyingPlugin) keepAllSessionsAlive() {
}
}
}(scraper, user.UsernameMasked)
count++
return true
})
if DebugLog && count > 0 {
fmt.Printf("[Gying] 💓 已为 %d 个用户执行session保活\n", count)
}
@@ -2255,3 +2277,4 @@ func (p *GyingPlugin) markInactiveUsers() int {
return markedCount
}