feat: add ci for lint and release, fix several bugs (#5)

* feat: add save_settings_with_update_port command and update related components

* refactor: remove state parameter from API key retrieval and update related functions

* feat: enhance release workflow with tag validation and improve build scripts

* feat: update release workflow to use version input from auto-version workflow

* 📦 Chore(custom): add Clippy configuration for consistent linting across platforms

* chore: add Clippy configuration for consistent linting across platforms

* 🐛 Fix(custom): fix ci

* 🚧 WIP(custom): fix clippy error

* 🐛 Fix(custom): add default openlist and rclone version

* 🚧 WIP(custom): fix clippy errors

* 🚧 WIP(custom): fix clippy errors

* 🐛 Fix(custom): fix ci bugs
Kuingsmile
2025-06-28 13:06:32 +08:00
committed by GitHub
parent bd8e54aa42
commit 5ade6a2c01
36 changed files with 1374 additions and 793 deletions


@@ -1,134 +0,0 @@
name: 'Auto Version and Release'
on:
workflow_dispatch:
permissions:
contents: write
packages: write
jobs:
check-commits:
runs-on: ubuntu-latest
outputs:
should-release: ${{ steps.check.outputs.should-release }}
version-type: ${{ steps.check.outputs.version-type }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check for conventional commits
id: check
run: |
# Get commits since last tag
LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "")
if [ -z "$LAST_TAG" ]; then
echo "No previous tag found, will create initial release"
echo "should-release=true" >> $GITHUB_OUTPUT
echo "version-type=minor" >> $GITHUB_OUTPUT
exit 0
fi
COMMITS=$(git log $LAST_TAG..HEAD --oneline)
if echo "$COMMITS" | grep -qE "^[a-f0-9]+ (feat|fix|BREAKING CHANGE)"; then
echo "Found commits that warrant a release"
echo "should-release=true" >> $GITHUB_OUTPUT
# Determine version bump type
if echo "$COMMITS" | grep -q "BREAKING CHANGE"; then
echo "version-type=major" >> $GITHUB_OUTPUT
elif echo "$COMMITS" | grep -q "feat"; then
echo "version-type=minor" >> $GITHUB_OUTPUT
else
echo "version-type=patch" >> $GITHUB_OUTPUT
fi
else
echo "No commits found that warrant a release"
echo "should-release=false" >> $GITHUB_OUTPUT
fi
auto-release:
needs: check-commits
if: needs.check-commits.outputs.should-release == 'true'
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 'lts/*'
- name: Calculate new version
id: version
run: |
# Get current version from package.json
CURRENT_VERSION=$(node -p "require('./package.json').version")
echo "Current version: $CURRENT_VERSION"
# Parse version
IFS='.' read -ra VERSION_PARTS <<< "$CURRENT_VERSION"
MAJOR=${VERSION_PARTS[0]}
MINOR=${VERSION_PARTS[1]}
PATCH=${VERSION_PARTS[2]}
# Bump version based on commit type
case "${{ needs.check-commits.outputs.version-type }}" in
major)
MAJOR=$((MAJOR + 1))
MINOR=0
PATCH=0
;;
minor)
MINOR=$((MINOR + 1))
PATCH=0
;;
patch)
PATCH=$((PATCH + 1))
;;
esac
NEW_VERSION="$MAJOR.$MINOR.$PATCH"
echo "New version: $NEW_VERSION"
echo "version=$NEW_VERSION" >> $GITHUB_OUTPUT
echo "tag=v$NEW_VERSION" >> $GITHUB_OUTPUT
- name: Update version in files
run: |
# Update package.json
npm version ${{ steps.version.outputs.version }} --no-git-tag-version
# Update Cargo.toml
sed -i 's/^version = "[^"]*"/version = "${{ steps.version.outputs.version }}"/' src-tauri/Cargo.toml
# Update tauri.conf.json
sed -i 's/"version": "[^"]*"/"version": "${{ steps.version.outputs.version }}"/' src-tauri/tauri.conf.json
- name: Commit version bump
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add package.json src-tauri/Cargo.toml src-tauri/tauri.conf.json
git commit -m "chore: bump version to ${{ steps.version.outputs.version }} [skip ci]"
git push
- name: Create and push tag
run: |
git tag ${{ steps.version.outputs.tag }}
git push origin ${{ steps.version.outputs.tag }}
- name: Trigger release workflow
run: |
curl -X POST \
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \
https://api.github.com/repos/${{ github.repository }}/actions/workflows/release.yml/dispatches \
-d '{"ref":"main","inputs":{"version":"${{ steps.version.outputs.tag }}"}}'

.github/workflows/ci.yml (new file, 197 lines)

@@ -0,0 +1,197 @@
name: 'CI - Lint and Test'
on:
workflow_dispatch:
pull_request:
branches: [ main, dev ]
paths:
- 'src/**'
- 'src-tauri/**'
- 'package.json'
- 'package-lock.json'
- 'yarn.lock'
- 'Cargo.toml'
- 'Cargo.lock'
- '.github/workflows/ci.yml'
push:
branches: [ main, dev ]
paths:
- 'src/**'
- 'src-tauri/**'
- 'package.json'
- 'package-lock.json'
- 'yarn.lock'
- 'Cargo.toml'
- 'Cargo.lock'
- '.github/workflows/ci.yml'
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: 1
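# Cancel any in-progress run for the same ref when a newer one starts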
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
frontend-lint:
name: Frontend Lint & Type Check
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '22'
cache: 'yarn'
- name: Install dependencies
run: yarn install
- name: Run ESLint
run: yarn lint
- name: Run TypeScript type check
run: yarn web:build --mode=production
rust-check:
name: Rust Check & Lint
runs-on: ${{ matrix.platform.os }}
strategy:
fail-fast: false
matrix:
platform:
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
- os: windows-latest
target: x86_64-pc-windows-msvc
- os: macos-13
target: x86_64-apple-darwin
- os: macos-latest
target: aarch64-apple-darwin
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.platform.target }}
components: rustfmt, clippy
- name: Cache Rust dependencies
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
cache-on-failure: true
- name: Install system dependencies (Linux)
if: matrix.platform.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '22'
cache: 'yarn'
- name: Install frontend dependencies
run: yarn install
- name: Run prebuild script
run: yarn prebuild:dev
- name: Check Rust formatting
working-directory: src-tauri
run: cargo fmt --all -- --check
- name: Run Clippy
working-directory: src-tauri
run: cargo clippy --target ${{ matrix.platform.target }} --all-targets --all-features -- -D warnings
- name: Run Cargo check
working-directory: src-tauri
run: cargo check --target ${{ matrix.platform.target }} --all-targets --all-features
- name: Run Rust tests
working-directory: src-tauri
run: cargo test --target ${{ matrix.platform.target }} --all-features
security-audit:
name: Security Audit
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '22'
cache: 'yarn'
- name: Install dependencies
run: yarn install
- name: Run npm audit
run: yarn audit --audit-level moderate
build-test:
name: Build Test
runs-on: ${{ matrix.platform.os }}
needs: [frontend-lint, rust-check]
strategy:
fail-fast: false
matrix:
platform:
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
- os: windows-latest
target: x86_64-pc-windows-msvc
- os: macos-13
target: x86_64-apple-darwin
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.platform.target }}
- name: Cache Rust dependencies
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
cache-on-failure: true
- name: Install system dependencies (Linux)
if: matrix.platform.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '22'
cache: 'yarn'
- name: Install frontend dependencies
run: yarn install
- name: Run prebuild script
run: yarn prebuild:dev
- name: Build frontend
run: yarn web:build
- name: Build Tauri application (test build)
working-directory: src-tauri
run: cargo build --target ${{ matrix.platform.target }} --release


@@ -4,23 +4,248 @@ on:
workflow_dispatch:
inputs:
version:
description: 'Version to release (e.g., v1.0.0). Leave empty for auto-version based on conventional commits.'
required: false
type: string
skip_version_check:
description: 'Skip automatic version detection and use manual version'
required: false
type: boolean
default: false
workflow_call:
inputs:
version:
required: true
type: string
permissions: write-all
env:
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: short
TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
# macOS signing and notarization
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
concurrency:
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
jobs:
check-commits:
name: Check Commits and Determine Version
runs-on: ubuntu-latest
if: github.event_name == 'workflow_dispatch' && (inputs.version == '' || inputs.skip_version_check == false)
outputs:
should-release: ${{ steps.check.outputs.should-release }}
version-type: ${{ steps.check.outputs.version-type }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check for conventional commits
id: check
run: |
# Get commits since last tag
LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "")
if [ -z "$LAST_TAG" ]; then
echo "No previous tag found, will create initial release"
echo "should-release=true" >> $GITHUB_OUTPUT
echo "version-type=minor" >> $GITHUB_OUTPUT
exit 0
fi
COMMITS=$(git log $LAST_TAG..HEAD --oneline)
if echo "$COMMITS" | grep -qE "^[a-f0-9]+ (feat|fix|BREAKING CHANGE)"; then
echo "Found commits that warrant a release"
echo "should-release=true" >> $GITHUB_OUTPUT
# Determine version bump type
if echo "$COMMITS" | grep -q "BREAKING CHANGE"; then
echo "version-type=major" >> $GITHUB_OUTPUT
elif echo "$COMMITS" | grep -q "feat"; then
echo "version-type=minor" >> $GITHUB_OUTPUT
else
echo "version-type=patch" >> $GITHUB_OUTPUT
fi
else
echo "No commits found that warrant a release"
echo "should-release=false" >> $GITHUB_OUTPUT
fi
auto-version:
name: Calculate and Update Version
needs: check-commits
runs-on: ubuntu-latest
if: |
always() &&
(needs.check-commits.result == 'skipped' ||
(needs.check-commits.result == 'success' && needs.check-commits.outputs.should-release == 'true'))
outputs:
version: ${{ steps.version.outputs.version }}
tag: ${{ steps.version.outputs.tag }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 'lts/*'
- name: Calculate new version
id: version
run: |
# If manual version is provided, use it
if [ "${{ github.event_name }}" = "workflow_dispatch" ] && [ -n "${{ inputs.version }}" ]; then
MANUAL_VERSION="${{ inputs.version }}"
# Remove 'v' prefix if present
NEW_VERSION="${MANUAL_VERSION#v}"
echo "Using manual version: $NEW_VERSION"
echo "version=$NEW_VERSION" >> $GITHUB_OUTPUT
echo "tag=v$NEW_VERSION" >> $GITHUB_OUTPUT
exit 0
fi
# If triggered by workflow_call, use the provided version
if [ "${{ github.event_name }}" = "workflow_call" ]; then
CALL_VERSION="${{ inputs.version }}"
NEW_VERSION="${CALL_VERSION#v}"
echo "Using workflow_call version: $NEW_VERSION"
echo "version=$NEW_VERSION" >> $GITHUB_OUTPUT
echo "tag=v$NEW_VERSION" >> $GITHUB_OUTPUT
exit 0
fi
# Auto-calculate version based on commits
CURRENT_VERSION=$(node -p "require('./package.json').version")
echo "Current version: $CURRENT_VERSION"
# Parse version
IFS='.' read -ra VERSION_PARTS <<< "$CURRENT_VERSION"
MAJOR=${VERSION_PARTS[0]}
MINOR=${VERSION_PARTS[1]}
PATCH=${VERSION_PARTS[2]}
# Bump version based on commit type
case "${{ needs.check-commits.outputs.version-type }}" in
major)
MAJOR=$((MAJOR + 1))
MINOR=0
PATCH=0
;;
minor)
MINOR=$((MINOR + 1))
PATCH=0
;;
patch)
PATCH=$((PATCH + 1))
;;
esac
NEW_VERSION="$MAJOR.$MINOR.$PATCH"
echo "New version: $NEW_VERSION"
echo "version=$NEW_VERSION" >> $GITHUB_OUTPUT
echo "tag=v$NEW_VERSION" >> $GITHUB_OUTPUT
- name: Update version in files
if: github.event_name == 'workflow_dispatch' && inputs.version == ''
run: |
# Update package.json
npm version ${{ steps.version.outputs.version }} --no-git-tag-version
# Update Cargo.toml
sed -i 's/^version = "[^"]*"/version = "${{ steps.version.outputs.version }}"/' src-tauri/Cargo.toml
# Update tauri.conf.json
sed -i 's/"version": "[^"]*"/"version": "${{ steps.version.outputs.version }}"/' src-tauri/tauri.conf.json
- name: Commit version bump
if: github.event_name == 'workflow_dispatch' && inputs.version == ''
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git add package.json src-tauri/Cargo.toml src-tauri/tauri.conf.json
git commit -m "chore: bump version to ${{ steps.version.outputs.version }} [skip ci]"
git push
- name: Create and push tag
if: github.event_name == 'workflow_dispatch' && inputs.version == ''
run: |
git tag ${{ steps.version.outputs.tag }}
git push origin ${{ steps.version.outputs.tag }}
check_tag_version:
name: Check Tag and All Version Files Consistency
needs: auto-version
runs-on: ubuntu-latest
if: github.event_name != 'workflow_dispatch' || inputs.version != ''
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Ensure jq and grep are installed
run: sudo apt-get update && sudo apt-get install -y jq
- name: Validate tag matches versions in package.json, Cargo.toml, tauri.conf.json
run: |
# Get the tag to validate
if [ "${{ github.event_name }}" = "workflow_dispatch" ] && [ -n "${{ inputs.version }}" ]; then
TAG_REF="${{ inputs.version }}"
elif [ "${{ github.event_name }}" = "workflow_call" ]; then
TAG_REF="${{ inputs.version }}"
else
TAG_REF="${GITHUB_REF##*/}" # e.g., v1.2.3
fi
TAG_VERSION="${TAG_REF#v}" # Remove "v" prefix for direct comparison
echo "Tag to validate: $TAG_REF"
# Get version from package.json
PKG_VERSION=$(jq -r .version package.json)
echo "package.json version: $PKG_VERSION"
# Get version from tauri.conf.json
TAURI_VERSION=$(jq -r .package.version src-tauri/tauri.conf.json)
echo "tauri.conf.json version: $TAURI_VERSION"
# Get version from Cargo.toml using grep/sed
CARGO_VERSION=$(grep '^version' src-tauri/Cargo.toml | head -n1 | sed 's/version = "\(.*\)"/\1/')
echo "Cargo.toml version: $CARGO_VERSION"
# Check all match
if [[ "$TAG_VERSION" != "$PKG_VERSION" ]]; then
echo "❌ Tag version ($TAG_VERSION) does not match package.json version ($PKG_VERSION)."
exit 1
fi
if [[ "$TAG_VERSION" != "$TAURI_VERSION" ]]; then
echo "❌ Tag version ($TAG_VERSION) does not match tauri.conf.json version ($TAURI_VERSION)."
exit 1
fi
if [[ "$TAG_VERSION" != "$CARGO_VERSION" ]]; then
echo "❌ Tag version ($TAG_VERSION) does not match Cargo.toml version ($CARGO_VERSION)."
exit 1
fi
echo "✅ Tag version matches all version files."
changelog:
name: Generate Changelog
needs: [auto-version, check_tag_version]
if: always() && (needs.auto-version.result == 'success' && (needs.check_tag_version.result == 'success' || needs.check_tag_version.result == 'skipped'))
runs-on: ubuntu-latest
outputs:
changelog: ${{ steps.changelog.outputs.changelog }}
@@ -34,8 +259,12 @@ jobs:
- name: Get tag
id: tag
run: |
if [ -n "${{ needs.auto-version.outputs.tag }}" ]; then
echo "tag=${{ needs.auto-version.outputs.tag }}" >> $GITHUB_OUTPUT
elif [ "${{ github.event_name }}" = "workflow_dispatch" ] && [ -n "${{ inputs.version }}" ]; then
echo "tag=${{ inputs.version }}" >> $GITHUB_OUTPUT
elif [ "${{ github.event_name }}" = "workflow_call" ]; then
echo "tag=${{ inputs.version }}" >> $GITHUB_OUTPUT
else
echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
fi
@@ -70,56 +299,62 @@ jobs:
path: changelog.md
build:
needs: [changelog, auto-version]
if: always() && needs.changelog.result == 'success'
strategy:
fail-fast: false
matrix:
include:
- os: windows-latest
target: x86_64-pc-windows-msvc
- os: windows-latest
target: aarch64-pc-windows-msvc
- os: macos-latest
target: aarch64-apple-darwin
- os: macos-latest
target: x86_64-apple-darwin
- os: ubuntu-22.04
target: x86_64-unknown-linux-gnu
runs-on: ${{ matrix.os }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust Stable
uses: dtolnay/rust-toolchain@stable
- name: Add Rust Target
run: rustup target add ${{ matrix.target }}
- name: Rust Cache
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
save-if: false
- name: Install dependencies (ubuntu only)
if: matrix.os == 'ubuntu-22.04'
run: |
sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "22"
- name: Run install
uses: borales/actions-yarn@v4
with:
cmd: install
- name: install and check
run: |
yarn install
yarn run prebuild:dev --target=${{ matrix.target }}
- name: Import Apple Developer Certificate (macOS only)
if: matrix.os == 'macos-latest'
uses: apple-actions/import-codesign-certs@v2
with:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
@@ -128,7 +363,10 @@ jobs:
- name: Build the app
uses: tauri-apps/tauri-action@v0
env:
NODE_OPTIONS: "--max_old_space_size=4096"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
# macOS signing and notarization environment variables
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
@@ -137,23 +375,147 @@ jobs:
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
# Enable signing and notarization for macOS
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
ENABLE_CODE_SIGNING: ${{ matrix.platform == 'macos-latest' && 'true' || 'false' }}
with:
tagName: ${{ needs.changelog.outputs.tag }}
releaseName: 'OpenList Desktop ${{ needs.changelog.outputs.tag }}'
releaseBody: ${{ needs.changelog.outputs.changelog }}
releaseDraft: false
prerelease: false
args: --target ${{ matrix.target }}
release-for-linux-arm:
name: Release Build for Linux ARM
needs: [changelog, auto-version]
if: always() && needs.changelog.result == 'success'
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-22.04
target: aarch64-unknown-linux-gnu
arch: arm64
- os: ubuntu-22.04
target: armv7-unknown-linux-gnueabihf
arch: armhf
runs-on: ${{ matrix.os }}
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Install Rust Stable
uses: dtolnay/rust-toolchain@stable
- name: Add Rust Target
run: rustup target add ${{ matrix.target }}
- name: Rust Cache
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
save-if: false
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "22"
- name: Run install
uses: borales/actions-yarn@v4
with:
cmd: install
- name: install and check
run: |
yarn install
yarn run prebuild:dev --target=${{ matrix.target }}
- name: "Setup for linux"
run: |-
sudo ls -lR /etc/apt/
cat > /tmp/sources.list << EOF
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy main multiverse universe restricted
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy-security main multiverse universe restricted
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy-updates main multiverse universe restricted
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy-backports main multiverse universe restricted
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy main multiverse universe restricted
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy-security main multiverse universe restricted
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy-updates main multiverse universe restricted
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy-backports main multiverse universe restricted
EOF
sudo mv /etc/apt/sources.list /etc/apt/sources.list.default
sudo mv /tmp/sources.list /etc/apt/sources.list
sudo dpkg --add-architecture ${{ matrix.arch }}
sudo apt update
sudo apt install -y \
libxslt1.1:${{ matrix.arch }} \
libwebkit2gtk-4.1-dev:${{ matrix.arch }} \
libayatana-appindicator3-dev:${{ matrix.arch }} \
libssl-dev:${{ matrix.arch }} \
patchelf:${{ matrix.arch }} \
librsvg2-dev:${{ matrix.arch }}
- name: "Install aarch64 tools"
if: matrix.target == 'aarch64-unknown-linux-gnu'
run: |
sudo apt install -y \
gcc-aarch64-linux-gnu \
g++-aarch64-linux-gnu
- name: "Install armv7 tools"
if: matrix.target == 'armv7-unknown-linux-gnueabihf'
run: |
sudo apt install -y \
gcc-arm-linux-gnueabihf \
g++-arm-linux-gnueabihf
- name: Build for Linux
run: |
export PKG_CONFIG_ALLOW_CROSS=1
if [ "${{ matrix.target }}" == "aarch64-unknown-linux-gnu" ]; then
export PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig/:$PKG_CONFIG_PATH
export PKG_CONFIG_SYSROOT_DIR=/usr/aarch64-linux-gnu/
elif [ "${{ matrix.target }}" == "armv7-unknown-linux-gnueabihf" ]; then
export PKG_CONFIG_PATH=/usr/lib/arm-linux-gnueabihf/pkgconfig/:$PKG_CONFIG_PATH
export PKG_CONFIG_SYSROOT_DIR=/usr/arm-linux-gnueabihf/
fi
yarn build --target ${{ matrix.target }}
env:
NODE_OPTIONS: "--max_old_space_size=4096"
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
- name: Get Version
run: |
sudo apt-get update
sudo apt-get install jq
echo "VERSION=$(cat package.json | jq '.version' | tr -d '"')" >> $GITHUB_ENV
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
- name: Upload Release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ needs.changelog.outputs.tag }}
name: 'OpenList Desktop ${{ needs.changelog.outputs.tag }}'
body: ${{ needs.changelog.outputs.changelog }}
token: ${{ secrets.GITHUB_TOKEN }}
files: |
src-tauri/target/${{ matrix.target }}/release/bundle/deb/*.deb
src-tauri/target/${{ matrix.target }}/release/bundle/rpm/*.rpm
publish:
name: Publish Release
needs: [changelog, build, release-for-linux-arm, auto-version]
runs-on: ubuntu-latest
if: always() && needs.build.result == 'success' && needs.changelog.result == 'success'
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Download changelog
uses: actions/download-artifact@v4
with:
@@ -169,60 +531,3 @@ jobs:
prerelease: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Update latest.json for auto-updater
run: |
# Get the latest release info
RELEASE_INFO=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
"https://api.github.com/repos/${{ github.repository }}/releases/latest")
# Extract version and download URLs
VERSION=$(echo "$RELEASE_INFO" | jq -r '.tag_name')
RELEASE_NOTES=$(echo "$RELEASE_INFO" | jq -r '.body')
RELEASE_DATE=$(echo "$RELEASE_INFO" | jq -r '.published_at')
# Create latest.json for Tauri updater
cat > latest.json << EOF
{
"version": "$VERSION",
"notes": $(echo "$RELEASE_NOTES" | jq -R -s .),
"pub_date": "$RELEASE_DATE",
"platforms": {
"darwin-x86_64": {
"signature": "",
"url": "https://github.com/${{ github.repository }}/releases/download/$VERSION/OpenList-Desktop_${VERSION}_x64_mac.dmg.tar.gz"
},
"darwin-aarch64": {
"signature": "",
"url": "https://github.com/${{ github.repository }}/releases/download/$VERSION/OpenList-Desktop_${VERSION}_aarch64_mac.dmg.tar.gz"
},
"linux-x86_64": {
"signature": "",
"url": "https://github.com/${{ github.repository }}/releases/download/$VERSION/openlist-desktop_${VERSION}_amd64.AppImage.tar.gz"
},
"windows-x86_64": {
"signature": "",
"url": "https://github.com/${{ github.repository }}/releases/download/$VERSION/OpenList-Desktop_${VERSION}_x64_en-US.msi.zip"
}
}
}
EOF
echo "Generated latest.json for auto-updater"
cat latest.json
- name: Commit and push latest.json
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
# Check if there are changes to commit
if [[ -n $(git status --porcelain) ]]; then
git add latest.json
git commit -m "Update latest.json for auto-updater [skip ci]"
git push origin HEAD:main
else
echo "No changes to commit"
fi
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.pre-commit-config.yaml (new file, 25 lines)

@@ -0,0 +1,25 @@
# Pre-commit hook configuration
# Install with: yarn add -D husky lint-staged
# Then run: npx husky install
repos:
- repo: local
hooks:
- id: frontend-lint
name: Frontend Lint
entry: yarn lint:fix
language: system
files: \.(ts|vue|js)$
- id: rust-fmt
name: Rust Format
entry: bash -c 'cd src-tauri && cargo fmt --all'
language: system
files: \.rs$
- id: rust-clippy
name: Rust Clippy
entry: bash -c 'cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings'
language: system
files: \.rs$
pass_filenames: false


@@ -147,7 +147,7 @@ npm run tauri build
#### Windows
1. 下载 `.exe` 安装程序
2. 以管理员身份运行安装程序
3. 按照安装向导进行操作
4. 从开始菜单或桌面快捷方式启动
@@ -161,10 +161,13 @@ npm run tauri build
#### Linux
1. 下载 `.deb` 或 `.rpm` 包
2. 使用包管理器安装:

```bash
sudo dpkg -i OpenList-Desktop_x.x.x_amd64.deb
# 或者
sudo rpm -i OpenList-Desktop_x.x.x_amd64.rpm
```

## 🚀 使用说明
@@ -179,7 +182,7 @@ npm run tauri build
#### 启动服务
```bash
仪表板 → 服务管理 → 启动 OpenList 服务
仪表板 → 快速操作 → 启动 Rclone 后端
```
@@ -227,7 +230,6 @@ npm run tauri build
- **右键单击托盘图标** 进行快速操作
- **双击** 显示/隐藏主窗口
- **服务状态** 通过图标颜色指示
## ⚙️ 配置
@@ -291,7 +293,7 @@ npm run tauri build
#### 先决条件
- **Node.js**:v22+ 和 yarn
- **Rust**:最新稳定版本
- **Git**:版本控制
@@ -303,7 +305,7 @@ git clone https://github.com/OpenListTeam/openlist-desktop.git
cd openlist-desktop
# 安装 Node.js 依赖
yarn install
# 安装 Rust 依赖
cd src-tauri
@@ -311,29 +313,36 @@ cargo fetch
# 准备开发环境
cd ..
yarn run prebuild:dev
# 启动开发服务器
yarn run dev
```
#### 开发命令
```bash
# 启动带热重载的开发服务器
yarn run dev
# 启动不带文件监视的开发
yarn run nowatch
# 运行代码检查
yarn run lint
# 修复代码检查问题
yarn run lint:fix
# 类型检查
yarn run build --dry-run
```
#### 提交PR
```bash
git add .
yarn cz
```
## 🤝 贡献


@@ -147,7 +147,7 @@ npm run tauri build
#### Windows
1. Download the `.exe` installer
2. Run the installer as Administrator
3. Follow the installation wizard
4. Launch from Start Menu or Desktop shortcut
@@ -161,10 +161,14 @@ npm run tauri build
#### Linux
1. Download the `.deb` or `.rpm` package
2. Use your package manager to install:

```bash
sudo dpkg -i OpenList-Desktop_x.x.x_amd64.deb
# or
sudo rpm -i OpenList-Desktop_x.x.x_amd64.rpm
```

## 🚀 Usage
@@ -179,7 +183,7 @@ npm run tauri build
#### Starting Services
```bash
Dashboard → Service Management → Start OpenList Service
Dashboard → Quick Actions → Start Rclone Backend
```
@@ -227,7 +231,6 @@ Add custom Rclone flags for optimal performance:
- **Right-click tray icon** for quick actions
- **Double-click** to show/hide main window
- **Service status** indicated by icon color
## ⚙️ Configuration
@@ -291,7 +294,7 @@ Add custom Rclone flags for optimal performance:
#### Prerequisites
- **Node.js**: v22+ with yarn
- **Rust**: Latest stable version
- **Git**: Version control
@@ -303,7 +306,7 @@ git clone https://github.com/OpenListTeam/openlist-desktop.git
cd openlist-desktop
# Install Node.js dependencies
yarn install
# Install Rust dependencies
cd src-tauri
@@ -311,34 +314,35 @@ cargo fetch
# Prepare development environment
cd ..
yarn run prebuild:dev
# Start development server
yarn run dev
```
#### Development Commands
```bash
# Start development server with hot reload
yarn run dev
# Start development without file watching
yarn run nowatch
# Run linting
yarn run lint
# Fix linting issues
yarn run lint:fix
# Type checking
yarn run build --dry-run
```
## 🤝 Contributing
We welcome contributions from the community!
## 📄 License
This project is licensed under the **GNU General Public License v3.0** - see the [LICENSE](./LICENSE) file for details.


@@ -19,15 +19,26 @@
"homepage": "https://github.com/OpenListTeam/openlist-desktop", "homepage": "https://github.com/OpenListTeam/openlist-desktop",
"scripts": { "scripts": {
"dev": "vite", "dev": "vite",
"build": "vue-tsc --noEmit && vite build", "web:build": "tsc --noEmit && vite build",
"preview": "vite preview", "web:preview": "vite preview",
"build": "cross-env NODE_OPTIONS='--max-old-space-size=4096' tauri build",
"tauri:dev": "cross-env RUST_BACKTRACE=1 tauri dev",
"tauri": "tauri", "tauri": "tauri",
"nowatch": "tauri dev --no-watch", "nowatch": "tauri dev --no-watch",
"lint": "eslint src/**/*.ts", "lint": "eslint src/**/*.ts",
"lint:fix": "eslint src/**/*.ts --fix", "lint:fix": "eslint src/**/*.ts --fix",
"cz": "git-cz", "cz": "git-cz",
"release": "bump-version", "release": "bump-version",
"prepare-dev": "node scripts/prepare.js" "prebuild:dev": "node scripts/prepare.js",
"check:rust": "cd src-tauri && cargo check --all-targets --all-features",
"check:rust:fmt": "cd src-tauri && cargo fmt --all -- --check",
"check:rust:clippy": "cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings",
"check:rust:all": "yarn check:rust:fmt && yarn check:rust:clippy && yarn check:rust",
"check:frontend": "yarn lint && tsc --noEmit",
"check:all": "yarn check:frontend && yarn check:rust:all",
"fix:rust": "cd src-tauri && cargo fmt --all && cargo clippy --all-targets --all-features --fix --allow-dirty",
"fix:frontend": "yarn lint:fix",
"fix:all": "yarn fix:frontend && yarn fix:rust"
}, },
"config": { "config": {
"commitizen": { "commitizen": {
@@ -68,11 +79,14 @@
"@typescript-eslint/parser": "^8.35.0", "@typescript-eslint/parser": "^8.35.0",
"@vitejs/plugin-vue": "^6.0.0", "@vitejs/plugin-vue": "^6.0.0",
"adm-zip": "^0.5.16", "adm-zip": "^0.5.16",
"cross-env": "^7.0.3",
"eslint": "^9.29.0", "eslint": "^9.29.0",
"eslint-plugin-simple-import-sort": "^12.1.1", "eslint-plugin-simple-import-sort": "^12.1.1",
"eslint-plugin-unicorn": "^59.0.1", "eslint-plugin-unicorn": "^59.0.1",
"fs-extra": "^11.3.0", "fs-extra": "^11.3.0",
"https-proxy-agent": "^7.0.6", "https-proxy-agent": "^7.0.6",
"husky": "^9.1.7",
"lint-staged": "^16.1.2",
"node-bump-version": "^2.0.0", "node-bump-version": "^2.0.0",
"node-fetch": "^3.3.2", "node-fetch": "^3.3.2",
"tar": "^7.4.3", "tar": "^7.4.3",


@@ -27,13 +27,13 @@ if (!getOpenlistArchMap[platformArch]) {
}
// Rclone version management
let rcloneVersion = 'v1.70.1'
const rcloneVersionUrl = 'https://github.com/rclone/rclone/releases/latest/download/version.txt'
async function getLatestRcloneVersion() {
try {
const response = await fetch(rcloneVersionUrl, getFetchOptions())
rcloneVersion = (await response.text()).trim().replace('rclone ', '') || '1.70.1'
console.log(`Latest rclone version: ${rcloneVersion}`)
} catch (error) {
console.log('Error fetching latest rclone version:', error.message)
@@ -41,7 +41,7 @@ async function getLatestRcloneVersion() {
}
// openlist version management
let openlistVersion = 'v4.0.3'
async function getLatestOpenlistVersion() {
try {
@@ -50,7 +50,7 @@ async function getLatestOpenlistVersion() {
getFetchOptions()
)
const data = await response.json()
openlistVersion = data.tag_name || 'v4.0.3'
console.log(`Latest OpenList version: ${openlistVersion}`)
} catch (error) {
console.log('Error fetching latest OpenList version:', error.message)


@@ -0,0 +1,16 @@
[target.x86_64-pc-windows-msvc]
# Windows-specific settings
rustflags = ["-C", "target-feature=+crt-static"]
[target.x86_64-unknown-linux-gnu]
# Linux-specific settings
rustflags = ["-C", "link-arg=-Wl,--compress-debug-sections=zlib"]
[target.x86_64-apple-darwin]
# macOS-specific settings
rustflags = ["-C", "link-arg=-Wl,-dead_strip"]
[target.aarch64-apple-darwin]
# macOS ARM-specific settings
rustflags = ["-C", "link-arg=-Wl,-dead_strip"]

src-tauri/clippy.toml (new file, 9 lines)

@@ -0,0 +1,9 @@
# Clippy configuration for consistent linting across platforms
# Deny warnings that should be errors
cognitive-complexity-threshold = 30
too-many-arguments-threshold = 7
type-complexity-threshold = 250
# Allow some clippy lints that are too noisy
avoid-breaking-exported-api = false


@@ -3,47 +3,11 @@ hard_tabs = false
tab_spaces = 4
newline_style = "Auto"
edition = "2024"
use_small_heuristics = "Default"
reorder_imports = true
reorder_modules = true
imports_granularity = "Crate"
group_imports = "StdExternalCrate"
use_small_heuristics = "Default"
remove_nested_parens = true
merge_derives = true
use_try_shorthand = false
use_field_init_shorthand = false
force_explicit_abi = true
fn_single_line = false
where_single_line = false
force_multiline_blocks = false
brace_style = "SameLineWhere"
format_strings = false
format_macro_matchers = true
normalize_comments = true
normalize_doc_attributes = false
wrap_comments = true
comment_width = 80
trailing_comma = "Vertical"
trailing_semicolon = true
spaces_around_ranges = false
binop_separator = "Front"
chain_width = 60
match_block_trailing_comma = false
match_arm_blocks = true
control_brace_style = "AlwaysSameLine"
single_line_if_else_max_width = 50
blank_lines_upper_bound = 1
blank_lines_lower_bound = 0
empty_item_single_line = true
struct_lit_single_line = true
fn_params_layout = "Tall"


@@ -26,8 +26,8 @@ pub async fn get_binary_version(binary_name: Option<String>) -> Result<String, S
.split_whitespace()
.nth(1)
.ok_or("Failed to parse version")?;
Ok(version.to_string())
} else {
Err("Failed to get OpenList binary version".to_string())
}
}


@@ -1,6 +1,7 @@
use std::fs;
use tauri::State;
use crate::cmd::http_api::{get_process_list, start_process, stop_process};
use crate::conf::config::MergedSettings;
use crate::object::structs::AppState;
use crate::utils::path::app_config_file_path;
@@ -19,6 +20,72 @@ pub async fn save_settings(
Ok(true)
}
#[tauri::command]
pub async fn save_settings_with_update_port(
settings: MergedSettings,
state: State<'_, AppState>,
) -> Result<bool, String> {
save_settings(settings.clone(), state.clone()).await?;
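// Locate the OpenList core's data/config.json next to the executable and sync the port saved above into it.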
let app_dir = std::env::current_exe()
.map_err(|e| format!("Failed to get current exe path: {e}"))?
.parent()
.ok_or("Failed to get parent directory")?
.to_path_buf();
let data_config_path = app_dir.join("data").join("config.json");
if let Some(parent) = data_config_path.parent() {
std::fs::create_dir_all(parent).map_err(|e| e.to_string())?;
}
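// Load the existing core config if present; otherwise start from a minimal JSON document that only sets scheme.http_port.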
let mut config = if data_config_path.exists() {
let content =
std::fs::read_to_string(data_config_path.clone()).map_err(|e| e.to_string())?;
serde_json::from_str(&content).map_err(|e| e.to_string())?
} else {
serde_json::json!({
"scheme": {
"http_port": settings.openlist.port,
}
})
};
if let Some(scheme) = config.get_mut("scheme") {
if let Some(scheme_obj) = scheme.as_object_mut() {
scheme_obj.insert(
"http_port".to_string(),
serde_json::Value::Number(serde_json::Number::from(settings.openlist.port)),
);
}
} else {
config["scheme"] = serde_json::json!({
"http_port": settings.openlist.port
});
}
let content = serde_json::to_string_pretty(&config).map_err(|e| e.to_string())?;
std::fs::write(data_config_path, content).map_err(|e| e.to_string())?;
// Stop the OpenList core process
let process_list = get_process_list(state.clone()).await?;
if let Some(existing_process) = process_list
.iter()
.find(|p| p.config.name == "single_openlist_core_process")
{
match stop_process(existing_process.config.id.clone(), state.clone()).await {
Ok(_) => log::info!("OpenList core process stopped successfully"),
Err(e) => log::warn!("Failed to stop OpenList core process: {e}"),
}
tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await;
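// Restart the core so it binds to the updated port; propagate an error if it fails to come back up.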
match start_process(existing_process.config.id.clone(), state.clone()).await {
Ok(_) => log::info!("OpenList core process started successfully with new port"),
Err(e) => {
log::error!("Failed to start OpenList core process: {e}");
return Err(format!(
"Failed to restart OpenList core with new port: {e}"
));
}
}
}
log::info!("Settings saved and OpenList core restarted with new port successfully");
Ok(true)
}
#[tauri::command]
pub async fn load_settings(state: State<'_, AppState>) -> Result<Option<MergedSettings>, String> {
state.load_settings()?;


@@ -67,10 +67,10 @@ fn get_current_platform() -> String {
let arch = env::consts::ARCH;
match os {
"windows" => format!("{arch}-pc-windows-msvc"),
"macos" => format!("{arch}-apple-darwin"),
"linux" => format!("{arch}-unknown-linux-gnu"),
_ => format!("{arch}-{os}"),
}
}
@@ -178,8 +178,8 @@ pub async fn check_for_updates() -> Result<UpdateCheck, String> {
.send() .send()
.await .await
.map_err(|e| { .map_err(|e| {
let error_msg = format!("Network error while checking for updates: {}", e); let error_msg = format!("Network error while checking for updates: {e}");
log::error!("{}", error_msg); log::error!("{error_msg}");
error_msg error_msg
})?; })?;
@@ -196,13 +196,13 @@ pub async fn check_for_updates() -> Result<UpdateCheck, String> {
status.canonical_reason().unwrap_or("Unknown") status.canonical_reason().unwrap_or("Unknown")
) )
}; };
log::error!("{}", error_msg); log::error!("{error_msg}");
return Err(error_msg); return Err(error_msg);
} }
let release: GitHubRelease = response.json().await.map_err(|e| { let release: GitHubRelease = response.json().await.map_err(|e| {
log::error!("Failed to parse GitHub response: {}", e); log::error!("Failed to parse GitHub response: {e}");
format!("Failed to parse update information: {}", e) format!("Failed to parse update information: {e}")
})?; })?;
let current_version = env!("CARGO_PKG_VERSION"); let current_version = env!("CARGO_PKG_VERSION");
@@ -235,14 +235,14 @@ pub async fn download_update(
asset_url: String, asset_url: String,
asset_name: String, asset_name: String,
) -> Result<String, String> { ) -> Result<String, String> {
log::info!("Starting download of update: {}", asset_name); log::info!("Starting download of update: {asset_name}");
let client = Client::new(); let client = Client::new();
let temp_dir = std::env::temp_dir(); let temp_dir = std::env::temp_dir();
let file_path = temp_dir.join(&asset_name); let file_path = temp_dir.join(&asset_name);
log::info!("Downloading to: {:?}", file_path); log::info!("Downloading to: {file_path:?}");
let mut response = client let mut response = client
.get(&asset_url) .get(&asset_url)
@@ -251,8 +251,8 @@ pub async fn download_update(
.send() .send()
.await .await
.map_err(|e| { .map_err(|e| {
let error_msg = format!("Failed to start download: {}", e); let error_msg = format!("Failed to start download: {e}");
log::error!("{}", error_msg); log::error!("{error_msg}");
error_msg error_msg
})?; })?;
@@ -267,16 +267,16 @@ pub async fn download_update(
status.canonical_reason().unwrap_or("Unknown") status.canonical_reason().unwrap_or("Unknown")
) )
}; };
log::error!("{}", error_msg); log::error!("{error_msg}");
return Err(error_msg); return Err(error_msg);
} }
let total_size = response.content_length().unwrap_or(0); let total_size = response.content_length().unwrap_or(0);
log::info!("Download size: {} bytes", total_size); log::info!("Download size: {total_size} bytes");
let mut file = tokio::fs::File::create(&file_path).await.map_err(|e| { let mut file = tokio::fs::File::create(&file_path).await.map_err(|e| {
log::error!("Failed to create download file: {}", e); log::error!("Failed to create download file: {e}");
format!("Failed to create file: {}", e) format!("Failed to create file: {e}")
})?; })?;
let mut downloaded = 0u64; let mut downloaded = 0u64;
@@ -284,12 +284,12 @@ pub async fn download_update(
let mut last_downloaded = 0u64; let mut last_downloaded = 0u64;
while let Some(chunk) = response.chunk().await.map_err(|e| { while let Some(chunk) = response.chunk().await.map_err(|e| {
log::error!("Download chunk error: {}", e); log::error!("Download chunk error: {e}");
format!("Download error: {}", e) format!("Download error: {e}")
})? { })? {
file.write_all(&chunk).await.map_err(|e| { file.write_all(&chunk).await.map_err(|e| {
log::error!("File write error: {}", e); log::error!("File write error: {e}");
format!("File write error: {}", e) format!("File write error: {e}")
})?; })?;
downloaded += chunk.len() as u64; downloaded += chunk.len() as u64;
@@ -317,7 +317,7 @@ pub async fn download_update(
}; };
if let Err(e) = app.emit("download-progress", &progress) { if let Err(e) = app.emit("download-progress", &progress) {
log::error!("Failed to emit download progress: {}", e); log::error!("Failed to emit download progress: {e}");
} }
last_progress_time = now; last_progress_time = now;
@@ -326,14 +326,14 @@ pub async fn download_update(
} }
file.flush().await.map_err(|e| { file.flush().await.map_err(|e| {
log::error!("Failed to flush file: {}", e); log::error!("Failed to flush file: {e}");
format!("File flush error: {}", e) format!("File flush error: {e}")
})?; })?;
log::info!("Download completed: {} bytes", downloaded); log::info!("Download completed: {downloaded} bytes");
if let Err(e) = app.emit("update-download-completed", ()) { if let Err(e) = app.emit("update-download-completed", ()) {
log::error!("Failed to emit download completed event: {}", e); log::error!("Failed to emit download completed event: {e}");
} }
Ok(file_path.to_string_lossy().to_string()) Ok(file_path.to_string_lossy().to_string())
@@ -344,18 +344,18 @@ pub async fn install_update_and_restart(
app: AppHandle, app: AppHandle,
installer_path: String, installer_path: String,
) -> Result<(), String> { ) -> Result<(), String> {
log::info!("Installing update from: {}", installer_path); log::info!("Installing update from: {installer_path}");
let path = PathBuf::from(&installer_path); let path = PathBuf::from(&installer_path);
if !path.exists() { if !path.exists() {
let error_msg = "Installer file not found".to_string(); let error_msg = "Installer file not found".to_string();
log::error!("{}", error_msg); log::error!("{error_msg}");
return Err(error_msg); return Err(error_msg);
} }
if let Err(e) = app.emit("update-install-started", ()) { if let Err(e) = app.emit("update-install-started", ()) {
log::error!("Failed to emit install started event: {}", e); log::error!("Failed to emit install started event: {e}");
} }
let result = match env::consts::OS { let result = match env::consts::OS {
@@ -370,20 +370,20 @@ pub async fn install_update_and_restart(
log::info!("Update installation started successfully"); log::info!("Update installation started successfully");
if let Err(e) = app.emit("update-install-completed", ()) { if let Err(e) = app.emit("update-install-completed", ()) {
log::error!("Failed to emit install completed event: {}", e); log::error!("Failed to emit install completed event: {e}");
} }
if let Err(e) = app.emit("app-restarting", ()) { if let Err(e) = app.emit("app-restarting", ()) {
log::error!("Failed to emit app restarting event: {}", e); log::error!("Failed to emit app restarting event: {e}");
} }
tokio::time::sleep(Duration::from_millis(1000)).await; tokio::time::sleep(Duration::from_millis(1000)).await;
std::process::exit(0); std::process::exit(0);
} }
Err(e) => { Err(e) => {
log::error!("Update installation failed: {}", e); log::error!("Update installation failed: {e}");
if let Err(emit_err) = app.emit("update-install-error", &e) { if let Err(emit_err) = app.emit("update-install-error", &e) {
log::error!("Failed to emit install error event: {}", emit_err); log::error!("Failed to emit install error event: {emit_err}");
} }
Err(e) Err(e)
} }
@@ -396,13 +396,12 @@ async fn install_windows_update(installer_path: &PathBuf) -> Result<(), String>
let mut cmd = Command::new(installer_path); let mut cmd = Command::new(installer_path);
cmd.arg("/SILENT"); cmd.arg("/SILENT");
tokio::task::spawn_blocking(move || { let _ = tokio::task::spawn_blocking(move || {
cmd.spawn() cmd.spawn()
.map_err(|e| format!("Failed to start Windows installer: {}", e)) .map_err(|e| format!("Failed to start Windows installer: {e}"))
}) })
.await .await
.map_err(|e| format!("Task error: {}", e))? .map_err(|e| format!("Task error: {e}"))?;
.map_err(|e| e)?;
Ok(()) Ok(())
} }
@@ -413,13 +412,12 @@ async fn install_macos_update(installer_path: &PathBuf) -> Result<(), String> {
let mut cmd = Command::new("open"); let mut cmd = Command::new("open");
cmd.arg(installer_path); cmd.arg(installer_path);
tokio::task::spawn_blocking(move || { let _ = tokio::task::spawn_blocking(move || {
cmd.spawn() cmd.spawn()
.map_err(|e| format!("Failed to start macOS installer: {}", e)) .map_err(|e| format!("Failed to start macOS installer: {e}"))
}) })
.await .await
.map_err(|e| format!("Task error: {}", e))? .map_err(|e| format!("Task error: {e}"))?;
.map_err(|e| e)?;
Ok(()) Ok(())
} }
@@ -450,13 +448,12 @@ async fn install_linux_update(installer_path: &PathBuf) -> Result<(), String> {
} }
}; };
tokio::task::spawn_blocking(move || { let _ = tokio::task::spawn_blocking(move || {
cmd.spawn() cmd.spawn()
.map_err(|e| format!("Failed to start Linux installer: {}", e)) .map_err(|e| format!("Failed to start Linux installer: {e}"))
}) })
.await .await
.map_err(|e| format!("Task error: {}", e))? .map_err(|e| format!("Task error: {e}"))?;
.map_err(|e| e)?;
Ok(()) Ok(())
} }
@@ -471,7 +468,7 @@ pub async fn set_auto_check_enabled(
enabled: bool, enabled: bool,
state: State<'_, AppState>, state: State<'_, AppState>,
) -> Result<(), String> { ) -> Result<(), String> {
log::info!("Setting auto-check updates preference to: {}", enabled); log::info!("Setting auto-check updates preference to: {enabled}");
let mut settings = state.get_settings().unwrap_or_else(|| { let mut settings = state.get_settings().unwrap_or_else(|| {
use crate::conf::config::MergedSettings; use crate::conf::config::MergedSettings;
@@ -482,7 +479,7 @@ pub async fn set_auto_check_enabled(
state.update_settings(settings.clone()); state.update_settings(settings.clone());
save_settings(settings, state) save_settings(settings, state)
.await .await
.map_err(|e| format!("Failed to save settings: {}", e))?; .map_err(|e| format!("Failed to save settings: {e}"))?;
Ok(()) Ok(())
} }
@@ -509,15 +506,15 @@ pub async fn perform_background_update_check(app: AppHandle) -> Result<(), Strin
); );
if let Err(e) = app.emit("background-update-available", &update_check) { if let Err(e) = app.emit("background-update-available", &update_check) {
log::error!("Failed to emit background-update-available event: {}", e); log::error!("Failed to emit background-update-available event: {e}");
} }
} else { } else {
log::debug!("Background check: App is up to date"); log::error!("Background check: App is up to date");
} }
Ok(()) Ok(())
} }
Err(e) => { Err(e) => {
log::debug!("Background update check failed: {}", e); log::error!("Background update check failed: {e}");
Ok(()) Ok(())
} }
} }
@@ -528,7 +525,7 @@ pub async fn restart_app(app: AppHandle) {
log::info!("Restarting application..."); log::info!("Restarting application...");
if let Err(e) = app.emit("app-restarting", ()) { if let Err(e) = app.emit("app-restarting", ()) {
log::error!("Failed to emit app-restarting event: {}", e); log::error!("Failed to emit app-restarting event: {e}");
} }
tokio::time::sleep(Duration::from_millis(500)).await; tokio::time::sleep(Duration::from_millis(500)).await;


@@ -7,27 +7,28 @@ use std::str::FromStr;
use tauri::State; use tauri::State;
#[tauri::command] #[tauri::command]
pub async fn get_process_list(state: State<'_, AppState>) -> Result<Vec<ProcessStatus>, String> { pub async fn get_process_list(_state: State<'_, AppState>) -> Result<Vec<ProcessStatus>, String> {
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
println!("API Key: {api_key}");
println!("Server Port: {port}");
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.get(format!("http://127.0.0.1:{}/api/v1/processes", port)) .get(format!("http://127.0.0.1:{port}/api/v1/processes"))
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
let response_text = response let response_text = response
.text() .text()
.await .await
.map_err(|e| format!("Failed to read response text: {}", e))?; .map_err(|e| format!("Failed to read response text: {e}"))?;
let process_list = match serde_json::from_str::<ListProcessResponse>(&response_text) { let process_list = match serde_json::from_str::<ListProcessResponse>(&response_text) {
Ok(process_list) => process_list, Ok(process_list) => process_list,
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(
"Failed to parse response: {}, response: {}", "Failed to parse response: {e}, response: {response_text}"
e, response_text
)); ));
} }
}; };
@@ -38,19 +39,18 @@ pub async fn get_process_list(state: State<'_, AppState>) -> Result<Vec<ProcessS
} }
#[tauri::command] #[tauri::command]
pub async fn start_process(id: String, state: State<'_, AppState>) -> Result<bool, String> { pub async fn start_process(id: String, _state: State<'_, AppState>) -> Result<bool, String> {
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.post(format!( .post(format!(
"http://127.0.0.1:{}/api/v1/processes/{}/start", "http://127.0.0.1:{port}/api/v1/processes/{id}/start"
port, id
)) ))
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
} else { } else {
@@ -59,19 +59,18 @@ pub async fn start_process(id: String, state: State<'_, AppState>) -> Result<boo
} }
#[tauri::command] #[tauri::command]
pub async fn stop_process(id: String, state: State<'_, AppState>) -> Result<bool, String> { pub async fn stop_process(id: String, _state: State<'_, AppState>) -> Result<bool, String> {
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.post(format!( .post(format!(
"http://127.0.0.1:{}/api/v1/processes/{}/stop", "http://127.0.0.1:{port}/api/v1/processes/{id}/stop"
port, id
)) ))
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
} else { } else {
@@ -80,32 +79,30 @@ pub async fn stop_process(id: String, state: State<'_, AppState>) -> Result<bool
} }
#[tauri::command] #[tauri::command]
pub async fn restart_process(id: String, state: State<'_, AppState>) -> Result<bool, String> { pub async fn restart_process(id: String, _state: State<'_, AppState>) -> Result<bool, String> {
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let stop_response = client let stop_response = client
.post(format!( .post(format!(
"http://127.0.0.1:{}/api/v1/processes/{}/stop", "http://127.0.0.1:{port}/api/v1/processes/{id}/stop"
port, id
)) ))
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if stop_response.status().is_success() { if stop_response.status().is_success() {
let start_response = client let start_response = client
.post( .post(
url::Url::from_str(&format!( url::Url::from_str(&format!(
"http://127.0.0.1:{}/api/v1/processes/{}/start", "http://127.0.0.1:{port}/api/v1/processes/{id}/start"
port, id
)) ))
.unwrap(), .unwrap(),
) )
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if start_response.status().is_success() { if start_response.status().is_success() {
Ok(true) Ok(true)
} else { } else {
@@ -126,18 +123,18 @@ pub async fn restart_process(id: String, state: State<'_, AppState>) -> Result<b
pub async fn update_process( pub async fn update_process(
id: String, id: String,
update_config: HashMap<String, String>, update_config: HashMap<String, String>,
state: State<'_, AppState>, _state: State<'_, AppState>,
) -> Result<bool, String> { ) -> Result<bool, String> {
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.put(format!("http://127.0.0.1:{}/api/v1/processes/{}", port, id)) .put(format!("http://127.0.0.1:{port}/api/v1/processes/{id}"))
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.json(&update_config) .json(&update_config)
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
} else { } else {
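The other recurring change in this file is dropping the managed-state argument: the `State<'_, AppState>` parameter is kept, prefixed with `_`, so the command's invoke signature is unchanged, while the key and port now come from free functions. A rough sketch under the assumption that `get_api_key`/`get_server_port` read stored settings rather than managed state (helper bodies below are hypothetical):

```rust
// Rough sketch of the signature change; helper bodies are placeholders.
use tauri::State;

pub struct AppState;

fn get_api_key() -> String { "example-key".into() }  // hypothetical helper body
fn get_server_port() -> u16 { 53211 }                // hypothetical default port

#[tauri::command]
pub async fn example_command(_state: State<'_, AppState>) -> Result<String, String> {
    let api_key = get_api_key();
    let port = get_server_port();
    Ok(format!("http://127.0.0.1:{port}/api/v1/processes with Bearer {api_key}"))
}
```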

View File

@@ -7,10 +7,10 @@ pub async fn get_admin_password() -> Result<String, String> {
let logs_dir = app_dir.join("logs/process_openlist_core.log"); let logs_dir = app_dir.join("logs/process_openlist_core.log");
let logs_content = let logs_content =
std::fs::read_to_string(logs_dir).map_err(|e| format!("Failed to read log file: {}", e))?; std::fs::read_to_string(logs_dir).map_err(|e| format!("Failed to read log file: {e}"))?;
let re = Regex::new(r"Successfully created the admin user and the initial password is: (\w+)") let re = Regex::new(r"Successfully created the admin user and the initial password is: (\w+)")
.map_err(|e| format!("Failed to create regex: {}", e))?; .map_err(|e| format!("Failed to create regex: {e}"))?;
let mut last_password = None; let mut last_password = None;
for line in logs_content.lines() { for line in logs_content.lines() {
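For context, the password recovery above boils down to scanning the core log for the last "initial password" match. A standalone sketch of that loop with the `regex` crate; the log text and function name are illustrative:

```rust
// Sketch of last-match extraction over the core log contents.
use regex::Regex;

fn last_password(logs_content: &str) -> Option<String> {
    let re = Regex::new(
        r"Successfully created the admin user and the initial password is: (\w+)",
    )
    .ok()?;
    let mut last = None;
    for line in logs_content.lines() {
        if let Some(caps) = re.captures(line) {
            last = Some(caps[1].to_string()); // keep the most recent match
        }
    }
    last
}
```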

View File

@@ -10,15 +10,15 @@ use url::Url;
#[tauri::command] #[tauri::command]
pub async fn create_openlist_core_process( pub async fn create_openlist_core_process(
auto_start: bool, auto_start: bool,
state: State<'_, AppState>, _state: State<'_, AppState>,
) -> Result<ProcessConfig, String> { ) -> Result<ProcessConfig, String> {
let binary_path = get_openlist_binary_path() let binary_path = get_openlist_binary_path()
.map_err(|e| format!("Failed to get OpenList binary path: {}", e))?; .map_err(|e| format!("Failed to get OpenList binary path: {e}"))?;
let log_file_path = let log_file_path =
get_app_logs_dir().map_err(|e| format!("Failed to get app logs directory: {}", e))?; get_app_logs_dir().map_err(|e| format!("Failed to get app logs directory: {e}"))?;
let log_file_path = log_file_path.join("process_openlist_core.log"); let log_file_path = log_file_path.join("process_openlist_core.log");
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let config = ProcessConfig { let config = ProcessConfig {
@@ -39,23 +39,22 @@ pub async fn create_openlist_core_process(
}; };
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.post(format!("http://127.0.0.1:{}/api/v1/processes", port)) .post(format!("http://127.0.0.1:{port}/api/v1/processes"))
.json(&config) .json(&config)
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
let response_text = response let response_text = response
.text() .text()
.await .await
.map_err(|e| format!("Failed to read response text: {}", e))?; .map_err(|e| format!("Failed to read response text: {e}"))?;
let process_config = match serde_json::from_str::<CreateProcessResponse>(&response_text) { let process_config = match serde_json::from_str::<CreateProcessResponse>(&response_text) {
Ok(process_config) => process_config, Ok(process_config) => process_config,
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(
"Failed to parse response: {}, response: {}", "Failed to parse response: {e}, response: {response_text}"
e, response_text
)); ));
} }
}; };
@@ -85,10 +84,10 @@ pub async fn get_openlist_core_status(state: State<'_, AppState>) -> Result<Serv
let health_check_url = format!("{}://localhost:{}", protocol, openlist_config.port); let health_check_url = format!("{}://localhost:{}", protocol, openlist_config.port);
let url = let url =
Url::parse(&health_check_url).map_err(|e| format!("Invalid health check URL: {}", e))?; Url::parse(&health_check_url).map_err(|e| format!("Invalid health check URL: {e}"))?;
let port = url.port_or_known_default(); let port = url.port_or_known_default();
let health_url = format!("{}/ping", health_check_url); let health_url = format!("{health_check_url}/ping");
let local_pid = None; let local_pid = None;
match reqwest::get(&health_url).await { match reqwest::get(&health_url).await {
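The status check above parses the configured base URL, derives the effective port, and probes `/ping`. A simplified sketch of that flow; the base URL passed in would come from the stored config, not the literal used here:

```rust
// Simplified health probe: parse the base URL, take its effective port,
// then hit `{base}/ping` and report reachability.
use url::Url;

async fn ping(base: &str) -> Result<(bool, Option<u16>), String> {
    let url = Url::parse(base).map_err(|e| format!("Invalid health check URL: {e}"))?;
    let port = url.port_or_known_default();
    let health_url = format!("{base}/ping");
    let reachable = reqwest::get(&health_url)
        .await
        .map(|resp| resp.status().is_success())
        .unwrap_or(false);
    Ok((reachable, port))
}
```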

View File

@@ -1,5 +1,5 @@
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::{Path, PathBuf};
use tauri::{AppHandle, State}; use tauri::{AppHandle, State};
use crate::cmd::http_api::{get_process_list, start_process, stop_process}; use crate::cmd::http_api::{get_process_list, start_process, stop_process};
@@ -11,14 +11,14 @@ fn normalize_path(path: &str) -> String {
{ {
let normalized = path.replace('/', "\\"); let normalized = path.replace('/', "\\");
if normalized.len() == 2 && normalized.chars().nth(1) == Some(':') { if normalized.len() == 2 && normalized.chars().nth(1) == Some(':') {
format!("{}\\", normalized) format!("{normalized}\\")
} else if normalized.len() > 2 } else if normalized.len() > 2
&& normalized.chars().nth(1) == Some(':') && normalized.chars().nth(1) == Some(':')
&& normalized.chars().nth(2) != Some('\\') && normalized.chars().nth(2) != Some('\\')
{ {
let drive = &normalized[..2]; let drive = &normalized[..2];
let rest = &normalized[2..]; let rest = &normalized[2..];
format!("{}\\{}", drive, rest) format!("{drive}\\{rest}")
} else { } else {
normalized normalized
} }
@@ -159,11 +159,11 @@ pub async fn update_tool_version(
version: String, version: String,
state: State<'_, AppState>, state: State<'_, AppState>,
) -> Result<String, String> { ) -> Result<String, String> {
log::info!("Updating {} to version {}", tool, version); log::info!("Updating {tool} to version {version}");
let process_list = get_process_list(state.clone()) let process_list = get_process_list(state.clone())
.await .await
.map_err(|e| format!("Failed to get process list: {}", e))?; .map_err(|e| format!("Failed to get process list: {e}"))?;
let process_name = match tool.as_str() { let process_name = match tool.as_str() {
"openlist" => "single_openlist_core_process", "openlist" => "single_openlist_core_process",
@@ -178,16 +178,16 @@ pub async fn update_tool_version(
if was_running { if was_running {
if let Some(pid) = &process_id { if let Some(pid) = &process_id {
log::info!("Stopping {} process with ID: {}", tool, pid); log::info!("Stopping {tool} process with ID: {pid}");
match tool.as_str() { match tool.as_str() {
"openlist" | "rclone" => { "openlist" | "rclone" => {
stop_process(pid.clone(), state.clone()) stop_process(pid.clone(), state.clone())
.await .await
.map_err(|e| format!("Failed to stop process: {}", e))?; .map_err(|e| format!("Failed to stop process: {e}"))?;
} }
_ => return Err("Unsupported tool".to_string()), _ => return Err("Unsupported tool".to_string()),
} }
log::info!("Successfully stopped {} process", tool); log::info!("Successfully stopped {tool} process");
} }
} }
@@ -195,33 +195,32 @@ pub async fn update_tool_version(
match result { match result {
Ok(_) => { Ok(_) => {
log::info!("Successfully downloaded and replaced {} binary", tool); log::info!("Successfully downloaded and replaced {tool} binary");
if was_running { if was_running {
if let Some(pid) = &process_id { if let Some(pid) = &process_id {
log::info!("Starting {} process with ID: {}", tool, pid); log::info!("Starting {tool} process with ID: {pid}");
match tool.as_str() { match tool.as_str() {
"openlist" | "rclone" => { "openlist" | "rclone" => {
start_process(pid.clone(), state.clone()) start_process(pid.clone(), state.clone())
.await .await
.map_err(|e| format!("Failed to start {} process: {}", tool, e))?; .map_err(|e| format!("Failed to start {tool} process: {e}"))?;
} }
_ => return Err("Unsupported tool".to_string()), _ => return Err("Unsupported tool".to_string()),
} }
log::info!("Successfully restarted {} process", tool); log::info!("Successfully restarted {tool} process");
} }
} }
Ok(format!("Successfully updated {} to {}", tool, version)) Ok(format!("Successfully updated {tool} to {version}"))
} }
Err(e) => { Err(e) => {
log::error!("Failed to update {} binary: {}", tool, e); log::error!("Failed to update {tool} binary: {e}");
if was_running { if was_running {
if let Some(pid) = &process_id { if let Some(pid) = &process_id {
log::info!( log::info!(
"Attempting to restart {} with previous binary after update failure", "Attempting to restart {tool} with previous binary after update failure"
tool
); );
match tool.as_str() { match tool.as_str() {
"openlist" | "rclone" => { "openlist" | "rclone" => {
@@ -232,7 +231,7 @@ pub async fn update_tool_version(
} }
} }
Err(format!("Failed to update {} to {}: {}", tool, version, e)) Err(format!("Failed to update {tool} to {version}: {e}"))
} }
} }
} }
@@ -247,29 +246,29 @@ async fn download_and_replace_binary(tool: &str, version: &str) -> Result<(), St
"windows" => "win32", "windows" => "win32",
"macos" => "darwin", "macos" => "darwin",
"linux" => "linux", "linux" => "linux",
_ => return Err(format!("Unsupported platform: {}", platform)), _ => return Err(format!("Unsupported platform: {platform}")),
}, },
match arch { match arch {
"x86_64" => "x64", "x86_64" => "x64",
"x86" => "ia32", "x86" => "ia32",
"aarch64" => "arm64", "aarch64" => "arm64",
"arm" => "arm", "arm" => "arm",
_ => return Err(format!("Unsupported architecture: {}", arch)), _ => return Err(format!("Unsupported architecture: {arch}")),
} }
); );
log::info!("Detected platform: {}", platform_arch); log::info!("Detected platform: {platform_arch}");
let (binary_path, download_info) = match tool { let (binary_path, download_info) = match tool {
"openlist" => { "openlist" => {
let path = get_openlist_binary_path() let path = get_openlist_binary_path()
.map_err(|e| format!("Failed to get OpenList binary path: {}", e))?; .map_err(|e| format!("Failed to get OpenList binary path: {e}"))?;
let info = get_openlist_download_info(&platform_arch, version)?; let info = get_openlist_download_info(&platform_arch, version)?;
(path, info) (path, info)
} }
"rclone" => { "rclone" => {
let path = get_rclone_binary_path() let path = get_rclone_binary_path()
.map_err(|e| format!("Failed to get Rclone binary path: {}", e))?; .map_err(|e| format!("Failed to get Rclone binary path: {e}"))?;
let info = get_rclone_download_info(&platform_arch, version)?; let info = get_rclone_download_info(&platform_arch, version)?;
(path, info) (path, info)
} }
@@ -278,8 +277,8 @@ async fn download_and_replace_binary(tool: &str, version: &str) -> Result<(), St
log::info!("Downloading {} from: {}", tool, download_info.download_url); log::info!("Downloading {} from: {}", tool, download_info.download_url);
let temp_dir = std::env::temp_dir().join(format!("{}-update-{}", tool, version)); let temp_dir = std::env::temp_dir().join(format!("{tool}-update-{version}"));
fs::create_dir_all(&temp_dir).map_err(|e| format!("Failed to create temp directory: {}", e))?; fs::create_dir_all(&temp_dir).map_err(|e| format!("Failed to create temp directory: {e}"))?;
let archive_path = temp_dir.join(&download_info.archive_name); let archive_path = temp_dir.join(&download_info.archive_name);
download_file(&download_info.download_url, &archive_path).await?; download_file(&download_info.download_url, &archive_path).await?;
@@ -302,7 +301,7 @@ async fn download_and_replace_binary(tool: &str, version: &str) -> Result<(), St
if binary_path.exists() { if binary_path.exists() {
fs::copy(&binary_path, &backup_path) fs::copy(&binary_path, &backup_path)
.map_err(|e| format!("Failed to backup current binary: {}", e))?; .map_err(|e| format!("Failed to backup current binary: {e}"))?;
} }
fs::copy(&extracted_binary_path, &binary_path).map_err(|e| { fs::copy(&extracted_binary_path, &binary_path).map_err(|e| {
@@ -310,7 +309,7 @@ async fn download_and_replace_binary(tool: &str, version: &str) -> Result<(), St
let _ = fs::copy(&backup_path, &binary_path); let _ = fs::copy(&backup_path, &binary_path);
let _ = fs::remove_file(&backup_path); let _ = fs::remove_file(&backup_path);
} }
format!("Failed to replace binary: {}", e) format!("Failed to replace binary: {e}")
})?; })?;
if backup_path.exists() { if backup_path.exists() {
@@ -321,17 +320,17 @@ async fn download_and_replace_binary(tool: &str, version: &str) -> Result<(), St
{ {
use std::os::unix::fs::PermissionsExt; use std::os::unix::fs::PermissionsExt;
let mut perms = fs::metadata(&binary_path) let mut perms = fs::metadata(&binary_path)
.map_err(|e| format!("Failed to get binary metadata: {}", e))? .map_err(|e| format!("Failed to get binary metadata: {e}"))?
.permissions(); .permissions();
perms.set_mode(0o755); perms.set_mode(0o755);
fs::set_permissions(&binary_path, perms) fs::set_permissions(&binary_path, perms)
.map_err(|e| format!("Failed to set executable permissions: {}", e))?; .map_err(|e| format!("Failed to set executable permissions: {e}"))?;
} }
let _ = fs::remove_file(&extracted_binary_path); let _ = fs::remove_file(&extracted_binary_path);
let _ = fs::remove_dir_all(&temp_dir); let _ = fs::remove_dir_all(&temp_dir);
log::info!("Successfully replaced {} binary", tool); log::info!("Successfully replaced {tool} binary");
Ok(()) Ok(())
} }
@@ -349,11 +348,10 @@ fn get_openlist_download_info(platform_arch: &str, version: &str) -> Result<Down
let archive_ext = if is_unix { "tar.gz" } else { "zip" }; let archive_ext = if is_unix { "tar.gz" } else { "zip" };
let exe_ext = if is_windows { ".exe" } else { "" }; let exe_ext = if is_windows { ".exe" } else { "" };
let archive_name = format!("openlist-{}.{}", arch_map, archive_ext); let archive_name = format!("openlist-{arch_map}.{archive_ext}");
let executable_name = format!("openlist{}", exe_ext); let executable_name = format!("openlist{exe_ext}");
let download_url = format!( let download_url = format!(
"https://github.com/OpenListTeam/OpenList/releases/download/{}/{}", "https://github.com/OpenListTeam/OpenList/releases/download/{version}/{archive_name}"
version, archive_name
); );
Ok(DownloadInfo { Ok(DownloadInfo {
@@ -368,12 +366,10 @@ fn get_rclone_download_info(platform_arch: &str, version: &str) -> Result<Downlo
let is_windows = platform_arch.starts_with("win32"); let is_windows = platform_arch.starts_with("win32");
let exe_ext = if is_windows { ".exe" } else { "" }; let exe_ext = if is_windows { ".exe" } else { "" };
let archive_name = format!("rclone-{}-{}.zip", version, arch_map); let archive_name = format!("rclone-{version}-{arch_map}.zip");
let executable_name = format!("rclone{}", exe_ext); let executable_name = format!("rclone{exe_ext}");
let download_url = format!( let download_url =
"https://github.com/rclone/rclone/releases/download/{}/{}", format!("https://github.com/rclone/rclone/releases/download/{version}/{archive_name}");
version, archive_name
);
Ok(DownloadInfo { Ok(DownloadInfo {
download_url, download_url,
@@ -394,8 +390,7 @@ fn get_openlist_arch_mapping(platform_arch: &str) -> Result<&'static str, String
"linux-arm64" => Ok("linux-arm64"), "linux-arm64" => Ok("linux-arm64"),
"linux-arm" => Ok("linux-arm-7"), "linux-arm" => Ok("linux-arm-7"),
_ => Err(format!( _ => Err(format!(
"Unsupported platform architecture: {}", "Unsupported platform architecture: {platform_arch}"
platform_arch
)), )),
} }
} }
@@ -412,25 +407,24 @@ fn get_rclone_arch_mapping(platform_arch: &str) -> Result<&'static str, String>
"linux-arm64" => Ok("linux-arm64"), "linux-arm64" => Ok("linux-arm64"),
"linux-arm" => Ok("linux-arm-v7"), "linux-arm" => Ok("linux-arm-v7"),
_ => Err(format!( _ => Err(format!(
"Unsupported platform architecture: {}", "Unsupported platform architecture: {platform_arch}"
platform_arch
)), )),
} }
} }
async fn download_file(url: &str, path: &PathBuf) -> Result<(), String> { async fn download_file(url: &str, path: &PathBuf) -> Result<(), String> {
log::info!("Downloading file from: {}", url); log::info!("Downloading file from: {url}");
let client = reqwest::Client::builder() let client = reqwest::Client::builder()
.user_agent("OpenList Desktop/1.0") .user_agent("OpenList Desktop/1.0")
.build() .build()
.map_err(|e| format!("Failed to create HTTP client: {}", e))?; .map_err(|e| format!("Failed to create HTTP client: {e}"))?;
let response = client let response = client
.get(url) .get(url)
.send() .send()
.await .await
.map_err(|e| format!("Failed to download file: {}", e))?; .map_err(|e| format!("Failed to download file: {e}"))?;
if !response.status().is_success() { if !response.status().is_success() {
return Err(format!( return Err(format!(
@@ -442,21 +436,21 @@ async fn download_file(url: &str, path: &PathBuf) -> Result<(), String> {
let bytes = response let bytes = response
.bytes() .bytes()
.await .await
.map_err(|e| format!("Failed to read response bytes: {}", e))?; .map_err(|e| format!("Failed to read response bytes: {e}"))?;
fs::write(path, &bytes).map_err(|e| format!("Failed to write file: {}", e))?; fs::write(path, &bytes).map_err(|e| format!("Failed to write file: {e}"))?;
log::info!("Downloaded file to: {:?}", path); log::info!("Downloaded file to: {path:?}");
Ok(()) Ok(())
} }
async fn extract_binary( async fn extract_binary(
archive_path: &PathBuf, archive_path: &PathBuf,
extract_dir: &PathBuf, extract_dir: &Path,
executable_name: &str, executable_name: &str,
tool: &str, tool: &str,
) -> Result<PathBuf, String> { ) -> Result<PathBuf, String> {
log::info!("Extracting archive: {:?}", archive_path); log::info!("Extracting archive: {archive_path:?}");
let archive_name = archive_path let archive_name = archive_path
.file_name() .file_name()
@@ -468,44 +462,43 @@ async fn extract_binary(
} else if archive_name.ends_with(".tar.gz") { } else if archive_name.ends_with(".tar.gz") {
extract_tar_gz(archive_path, extract_dir, executable_name, tool) extract_tar_gz(archive_path, extract_dir, executable_name, tool)
} else { } else {
Err(format!("Unsupported archive format: {}", archive_name)) Err(format!("Unsupported archive format: {archive_name}"))
} }
} }
fn extract_zip( fn extract_zip(
archive_path: &PathBuf, archive_path: &PathBuf,
extract_dir: &PathBuf, extract_dir: &Path,
executable_name: &str, executable_name: &str,
tool: &str, tool: &str,
) -> Result<PathBuf, String> { ) -> Result<PathBuf, String> {
let file = let file = fs::File::open(archive_path).map_err(|e| format!("Failed to open zip file: {e}"))?;
fs::File::open(archive_path).map_err(|e| format!("Failed to open zip file: {}", e))?;
let mut archive = let mut archive =
zip::ZipArchive::new(file).map_err(|e| format!("Failed to read zip archive: {}", e))?; zip::ZipArchive::new(file).map_err(|e| format!("Failed to read zip archive: {e}"))?;
let mut executable_path = None; let mut executable_path = None;
for i in 0..archive.len() { for i in 0..archive.len() {
let mut file = archive let mut file = archive
.by_index(i) .by_index(i)
.map_err(|e| format!("Failed to read zip entry: {}", e))?; .map_err(|e| format!("Failed to read zip entry: {e}"))?;
let file_name = file.name(); let file_name = file.name();
let is_target_executable = if tool == "rclone" { let is_target_executable = if tool == "rclone" {
file_name.ends_with(executable_name) && file_name.contains("rclone") file_name.ends_with(executable_name) && file_name.contains("rclone")
} else { } else {
file_name == executable_name || file_name.ends_with(&format!("/{}", executable_name)) file_name == executable_name || file_name.ends_with(&format!("/{executable_name}"))
}; };
if is_target_executable { if is_target_executable {
let output_path = extract_dir.join(executable_name); let output_path = extract_dir.join(executable_name);
let mut output_file = fs::File::create(&output_path) let mut output_file = fs::File::create(&output_path)
.map_err(|e| format!("Failed to create output file: {}", e))?; .map_err(|e| format!("Failed to create output file: {e}"))?;
std::io::copy(&mut file, &mut output_file) std::io::copy(&mut file, &mut output_file)
.map_err(|e| format!("Failed to extract file: {}", e))?; .map_err(|e| format!("Failed to extract file: {e}"))?;
executable_path = Some(output_path); executable_path = Some(output_path);
break; break;
@@ -513,12 +506,12 @@ fn extract_zip(
} }
executable_path executable_path
.ok_or_else(|| format!("Executable '{}' not found in zip archive", executable_name)) .ok_or_else(|| format!("Executable '{executable_name}' not found in zip archive"))
} }
fn extract_tar_gz( fn extract_tar_gz(
archive_path: &PathBuf, archive_path: &PathBuf,
extract_dir: &PathBuf, extract_dir: &Path,
executable_name: &str, executable_name: &str,
_tool: &str, _tool: &str,
) -> Result<PathBuf, String> { ) -> Result<PathBuf, String> {
@@ -526,7 +519,7 @@ fn extract_tar_gz(
use tar::Archive; use tar::Archive;
let file = let file =
fs::File::open(archive_path).map_err(|e| format!("Failed to open tar.gz file: {}", e))?; fs::File::open(archive_path).map_err(|e| format!("Failed to open tar.gz file: {e}"))?;
let gz_decoder = GzDecoder::new(file); let gz_decoder = GzDecoder::new(file);
let mut archive = Archive::new(gz_decoder); let mut archive = Archive::new(gz_decoder);
@@ -535,21 +528,21 @@ fn extract_tar_gz(
for entry in archive for entry in archive
.entries() .entries()
.map_err(|e| format!("Failed to read tar entries: {}", e))? .map_err(|e| format!("Failed to read tar entries: {e}"))?
{ {
let mut entry = entry.map_err(|e| format!("Failed to read tar entry: {}", e))?; let mut entry = entry.map_err(|e| format!("Failed to read tar entry: {e}"))?;
let path = entry let path = entry
.path() .path()
.map_err(|e| format!("Failed to get entry path: {}", e))?; .map_err(|e| format!("Failed to get entry path: {e}"))?;
if let Some(file_name) = path.file_name() { if let Some(file_name) = path.file_name() {
if file_name == executable_name { if file_name == executable_name {
let output_path = extract_dir.join(executable_name); let output_path = extract_dir.join(executable_name);
let mut output_file = fs::File::create(&output_path) let mut output_file = fs::File::create(&output_path)
.map_err(|e| format!("Failed to create output file: {}", e))?; .map_err(|e| format!("Failed to create output file: {e}"))?;
std::io::copy(&mut entry, &mut output_file) std::io::copy(&mut entry, &mut output_file)
.map_err(|e| format!("Failed to extract file: {}", e))?; .map_err(|e| format!("Failed to extract file: {e}"))?;
executable_path = Some(output_path); executable_path = Some(output_path);
break; break;
@@ -557,10 +550,6 @@ fn extract_tar_gz(
} }
} }
executable_path.ok_or_else(|| { executable_path
format!( .ok_or_else(|| format!("Executable '{executable_name}' not found in tar.gz archive"))
"Executable '{}' not found in tar.gz archive",
executable_name
)
})
} }
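The signature changes to `extract_binary`, `extract_zip`, and `extract_tar_gz` above follow clippy's `ptr_arg` lint: take `&Path` instead of `&PathBuf`. A minimal sketch of why the existing call sites keep working unchanged:

```rust
// clippy::ptr_arg sketch: accept &Path; &PathBuf coerces at the call site.
use std::path::{Path, PathBuf};

fn output_path(extract_dir: &Path, executable_name: &str) -> PathBuf {
    extract_dir.join(executable_name)
}

fn caller() -> PathBuf {
    let dir: PathBuf = std::env::temp_dir();
    output_path(&dir, "openlist") // &PathBuf derefs to &Path here
}
```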

View File

@@ -34,18 +34,18 @@ pub async fn create_and_start_rclone_backend(
#[tauri::command] #[tauri::command]
pub async fn create_rclone_backend_process( pub async fn create_rclone_backend_process(
state: State<'_, AppState>, _state: State<'_, AppState>,
) -> Result<ProcessConfig, String> { ) -> Result<ProcessConfig, String> {
let binary_path = let binary_path =
get_rclone_binary_path().map_err(|e| format!("Failed to get rclone binary path: {}", e))?; get_rclone_binary_path().map_err(|e| format!("Failed to get rclone binary path: {e}"))?;
let log_file_path = let log_file_path =
get_app_logs_dir().map_err(|e| format!("Failed to get app logs directory: {}", e))?; get_app_logs_dir().map_err(|e| format!("Failed to get app logs directory: {e}"))?;
let rclone_conf_path = binary_path let rclone_conf_path = binary_path
.parent() .parent()
.map(|p| p.join("rclone.conf")) .map(|p| p.join("rclone.conf"))
.ok_or_else(|| "Failed to determine rclone.conf path".to_string())?; .ok_or_else(|| "Failed to determine rclone.conf path".to_string())?;
let log_file_path = log_file_path.join("process_rclone.log"); let log_file_path = log_file_path.join("process_rclone.log");
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let config = ProcessConfig { let config = ProcessConfig {
id: "rclone_backend".into(), id: "rclone_backend".into(),
@@ -76,23 +76,22 @@ pub async fn create_rclone_backend_process(
}; };
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.post(format!("http://127.0.0.1:{}/api/v1/processes", port)) .post(format!("http://127.0.0.1:{port}/api/v1/processes"))
.json(&config) .json(&config)
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
let response_text = response let response_text = response
.text() .text()
.await .await
.map_err(|e| format!("Failed to read response text: {}", e))?; .map_err(|e| format!("Failed to read response text: {e}"))?;
let process_config = match serde_json::from_str::<CreateProcessResponse>(&response_text) { let process_config = match serde_json::from_str::<CreateProcessResponse>(&response_text) {
Ok(process_config) => process_config, Ok(process_config) => process_config,
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(
"Failed to parse response: {}, response: {}", "Failed to parse response: {e}, response: {response_text}"
e, response_text
)); ));
} }
}; };
@@ -114,7 +113,7 @@ async fn is_rclone_running() -> bool {
let client = Client::new(); let client = Client::new();
let response = client let response = client
.get(&format!("{}/", RCLONE_API_BASE)) .get(format!("{RCLONE_API_BASE}/"))
.timeout(Duration::from_secs(1)) .timeout(Duration::from_secs(1))
.send() .send()
.await; .await;
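The `.get(&format!(..))` to `.get(format!(..))` change above works because reqwest's request builders accept `impl IntoUrl`, and an owned `String` already implements it, so the borrow clippy flags as needless can be dropped. A small sketch; the base URL constant is illustrative:

```rust
// Needless-borrow cleanup: pass the formatted String straight to the builder.
const RCLONE_API_BASE: &str = "http://127.0.0.1:5572"; // illustrative base URL

async fn rclone_api_reachable() -> bool {
    let client = reqwest::Client::new();
    client
        .get(format!("{RCLONE_API_BASE}/")) // String implements IntoUrl
        .timeout(std::time::Duration::from_secs(1))
        .send()
        .await
        .is_ok()
}
```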

View File

@@ -20,39 +20,38 @@ pub async fn rclone_list_config(
) -> Result<serde_json::Value, String> { ) -> Result<serde_json::Value, String> {
let client = Client::new(); let client = Client::new();
let response = client let response = client
.post(format!("{}/config/dump", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/config/dump"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
let response_text = response let response_text = response
.text() .text()
.await .await
.map_err(|e| format!("Failed to read response text: {}", e))?; .map_err(|e| format!("Failed to read response text: {e}"))?;
let json: serde_json::Value = serde_json::from_str(&response_text) let json: serde_json::Value = serde_json::from_str(&response_text)
.map_err(|e| format!("Failed to parse JSON: {}", e))?; .map_err(|e| format!("Failed to parse JSON: {e}"))?;
let remotes = if remote_type.is_empty() { let remotes = if remote_type.is_empty() {
json.clone() json.clone()
} else { } else if let Some(obj) = json.as_object() {
if let Some(obj) = json.as_object() { let mut filtered_map = serde_json::Map::new();
let mut filtered_map = serde_json::Map::new(); for (remote_name, remote_config) in obj {
for (remote_name, remote_config) in obj { if let Some(config_obj) = remote_config.as_object() {
if let Some(config_obj) = remote_config.as_object() { if let Some(remote_type_value) = config_obj.get("type") {
if let Some(remote_type_value) = config_obj.get("type") { if let Some(type_str) = remote_type_value.as_str() {
if let Some(type_str) = remote_type_value.as_str() { if type_str == remote_type {
if type_str == remote_type { filtered_map.insert(remote_name.clone(), remote_config.clone());
filtered_map.insert(remote_name.clone(), remote_config.clone());
}
} }
} }
} }
} }
serde_json::Value::Object(filtered_map)
} else {
serde_json::Value::Object(serde_json::Map::new())
} }
serde_json::Value::Object(filtered_map)
} else {
serde_json::Value::Object(serde_json::Map::new())
}; };
Ok(remotes) Ok(remotes)
} else { } else {
Err(format!( Err(format!(
@@ -67,24 +66,24 @@ pub async fn rclone_list_remotes() -> Result<Vec<String>, String> {
let client = Client::new(); let client = Client::new();
let response = client let response = client
.post(&format!("{}/config/listremotes", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/config/listremotes"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.send() .send()
.await .await
.map_err(|e| format!("Failed to list remotes: {}", e))?; .map_err(|e| format!("Failed to list remotes: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
let remote_list: RcloneRemoteListResponse = response let remote_list: RcloneRemoteListResponse = response
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse remote list response: {}", e))?; .map_err(|e| format!("Failed to parse remote list response: {e}"))?;
Ok(remote_list.remotes) Ok(remote_list.remotes)
} else { } else {
let error_text = response let error_text = response
.text() .text()
.await .await
.unwrap_or_else(|_| "Unknown error".to_string()); .unwrap_or_else(|_| "Unknown error".to_string());
Err(format!("Failed to list remotes: {}", error_text)) Err(format!("Failed to list remotes: {error_text}"))
} }
} }
@@ -93,24 +92,24 @@ pub async fn rclone_list_mounts() -> Result<RcloneMountListResponse, String> {
let client = Client::new(); let client = Client::new();
let response = client let response = client
.post(&format!("{}/mount/listmounts", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/mount/listmounts"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.send() .send()
.await .await
.map_err(|e| format!("Failed to list mounts: {}", e))?; .map_err(|e| format!("Failed to list mounts: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
let mount_list: RcloneMountListResponse = response let mount_list: RcloneMountListResponse = response
.json() .json()
.await .await
.map_err(|e| format!("Failed to parse mount list response: {}", e))?; .map_err(|e| format!("Failed to parse mount list response: {e}"))?;
Ok(mount_list) Ok(mount_list)
} else { } else {
let error_text = response let error_text = response
.text() .text()
.await .await
.unwrap_or_else(|_| "Unknown error".to_string()); .unwrap_or_else(|_| "Unknown error".to_string());
Err(format!("Failed to list mounts: {}", error_text)) Err(format!("Failed to list mounts: {error_text}"))
} }
} }
@@ -135,13 +134,13 @@ pub async fn rclone_create_remote(
}; };
let response = client let response = client
.post(&format!("{}/config/create", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/config/create"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.header("Content-Type", "application/json") .header("Content-Type", "application/json")
.json(&create_request) .json(&create_request)
.send() .send()
.await .await
.map_err(|e| format!("Failed to create remote config: {}", e))?; .map_err(|e| format!("Failed to create remote config: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
@@ -150,7 +149,7 @@ pub async fn rclone_create_remote(
.text() .text()
.await .await
.unwrap_or_else(|_| "Unknown error".to_string()); .unwrap_or_else(|_| "Unknown error".to_string());
Err(format!("Failed to create remote config: {}", error_text)) Err(format!("Failed to create remote config: {error_text}"))
} }
} }
@@ -164,13 +163,13 @@ pub async fn rclone_update_remote(
let client = Client::new(); let client = Client::new();
let response = client let response = client
.post(&format!("{}/config/update", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/config/update"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.header("Content-Type", "application/json") .header("Content-Type", "application/json")
.json(&json!({ "name": name, "type": r#type, "parameters": config })) .json(&json!({ "name": name, "type": r#type, "parameters": config }))
.send() .send()
.await .await
.map_err(|e| format!("Failed to update remote config: {}", e))?; .map_err(|e| format!("Failed to update remote config: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
@@ -179,7 +178,7 @@ pub async fn rclone_update_remote(
.text() .text()
.await .await
.unwrap_or_else(|_| "Unknown error".to_string()); .unwrap_or_else(|_| "Unknown error".to_string());
Err(format!("Failed to update remote config: {}", error_text)) Err(format!("Failed to update remote config: {error_text}"))
} }
} }
@@ -191,13 +190,13 @@ pub async fn rclone_delete_remote(
let client = Client::new(); let client = Client::new();
let response = client let response = client
.post(&format!("{}/config/delete", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/config/delete"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.header("Content-Type", "application/json") .header("Content-Type", "application/json")
.json(&json!({ "name": name })) .json(&json!({ "name": name }))
.send() .send()
.await .await
.map_err(|e| format!("Failed to delete remote config: {}", e))?; .map_err(|e| format!("Failed to delete remote config: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
@@ -206,7 +205,7 @@ pub async fn rclone_delete_remote(
.text() .text()
.await .await
.unwrap_or_else(|_| "Unknown error".to_string()); .unwrap_or_else(|_| "Unknown error".to_string());
Err(format!("Failed to delete remote config: {}", error_text)) Err(format!("Failed to delete remote config: {error_text}"))
} }
} }
@@ -218,13 +217,13 @@ pub async fn rclone_mount_remote(
let client = Client::new(); let client = Client::new();
let response = client let response = client
.post(&format!("{}/mount/mount", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/mount/mount"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.header("Content-Type", "application/json") .header("Content-Type", "application/json")
.json(&mount_request) .json(&mount_request)
.send() .send()
.await .await
.map_err(|e| format!("Failed to mount remote: {}", e))?; .map_err(|e| format!("Failed to mount remote: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
@@ -233,7 +232,7 @@ pub async fn rclone_mount_remote(
.text() .text()
.await .await
.unwrap_or_else(|_| "Unknown error".to_string()); .unwrap_or_else(|_| "Unknown error".to_string());
Err(format!("Failed to mount remote: {}", error_text)) Err(format!("Failed to mount remote: {error_text}"))
} }
} }
@@ -245,13 +244,13 @@ pub async fn rclone_unmount_remote(
let client = Client::new(); let client = Client::new();
let response = client let response = client
.post(&format!("{}/mount/unmount", RCLONE_API_BASE)) .post(format!("{RCLONE_API_BASE}/mount/unmount"))
.header("Authorization", RCLONE_AUTH) .header("Authorization", RCLONE_AUTH)
.header("Content-Type", "application/json") .header("Content-Type", "application/json")
.json(&json!({ "mountPoint": mount_point })) .json(&json!({ "mountPoint": mount_point }))
.send() .send()
.await .await
.map_err(|e| format!("Failed to unmount remote: {}", e))?; .map_err(|e| format!("Failed to unmount remote: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
@@ -260,26 +259,26 @@ pub async fn rclone_unmount_remote(
.text() .text()
.await .await
.unwrap_or_else(|_| "Unknown error".to_string()); .unwrap_or_else(|_| "Unknown error".to_string());
Err(format!("Failed to unmount remote: {}", error_text)) Err(format!("Failed to unmount remote: {error_text}"))
} }
} }
#[tauri::command] #[tauri::command]
pub async fn create_rclone_mount_remote_process( pub async fn create_rclone_mount_remote_process(
config: ProcessConfig, config: ProcessConfig,
state: State<'_, AppState>, _state: State<'_, AppState>,
) -> Result<ProcessConfig, String> { ) -> Result<ProcessConfig, String> {
let binary_path = let binary_path =
get_rclone_binary_path().map_err(|e| format!("Failed to get rclone binary path: {}", e))?; get_rclone_binary_path().map_err(|e| format!("Failed to get rclone binary path: {e}"))?;
let log_file_path = let log_file_path =
get_app_logs_dir().map_err(|e| format!("Failed to get app logs directory: {}", e))?; get_app_logs_dir().map_err(|e| format!("Failed to get app logs directory: {e}"))?;
let log_file_path = log_file_path.join("process_rclone.log"); let log_file_path = log_file_path.join("process_rclone.log");
let rclone_conf_path = binary_path let rclone_conf_path = binary_path
.parent() .parent()
.map(|p| p.join("rclone.conf")) .map(|p| p.join("rclone.conf"))
.ok_or_else(|| "Failed to determine rclone.conf path".to_string())?; .ok_or_else(|| "Failed to determine rclone.conf path".to_string())?;
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let mut args: Vec<String> = vec![ let mut args: Vec<String> = vec![
"mount".into(), "mount".into(),
@@ -306,23 +305,22 @@ pub async fn create_rclone_mount_remote_process(
}; };
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.post(format!("http://127.0.0.1:{}/api/v1/processes", port)) .post(format!("http://127.0.0.1:{port}/api/v1/processes"))
.json(&config) .json(&config)
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
let response_text = response let response_text = response
.text() .text()
.await .await
.map_err(|e| format!("Failed to read response text: {}", e))?; .map_err(|e| format!("Failed to read response text: {e}"))?;
let process_config = match serde_json::from_str::<CreateProcessResponse>(&response_text) { let process_config = match serde_json::from_str::<CreateProcessResponse>(&response_text) {
Ok(process_config) => process_config, Ok(process_config) => process_config,
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(
"Failed to parse response: {}, response: {}", "Failed to parse response: {e}, response: {response_text}"
e, response_text
)); ));
} }
}; };
@@ -347,7 +345,7 @@ pub async fn check_mount_status(
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
{ {
if mount_point.len() == 2 && mount_point.ends_with(':') { if mount_point.len() == 2 && mount_point.ends_with(':') {
let drive_path = format!("{}\\", mount_point); let drive_path = format!("{mount_point}\\");
match fs::read_dir(&drive_path) { match fs::read_dir(&drive_path) {
Ok(_) => return Ok(true), Ok(_) => return Ok(true),
Err(_) => return Ok(false), Err(_) => return Ok(false),
@@ -387,12 +385,10 @@ pub async fn get_mount_info_list(
Ok(is_mounted) => { Ok(is_mounted) => {
if process.is_running { if process.is_running {
if is_mounted { "mounted" } else { "mounting" } if is_mounted { "mounted" } else { "mounting" }
} else if is_mounted {
"unmounting"
} else { } else {
if is_mounted { "unmounted"
"unmounting"
} else {
"unmounted"
}
} }
} }
Err(_) => "error", Err(_) => "error",

View File

@@ -24,16 +24,16 @@ pub async fn uninstall_service() -> Result<bool, String> {
} }
#[tauri::command] #[tauri::command]
pub async fn stop_service(state: State<'_, AppState>) -> Result<bool, String> { pub async fn stop_service(_state: State<'_, AppState>) -> Result<bool, String> {
let api_key = get_api_key(state); let api_key = get_api_key();
let port = get_server_port(); let port = get_server_port();
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = client let response = client
.post(format!("http://127.0.0.1:{}/api/v1/service/stop", port)) .post(format!("http://127.0.0.1:{port}/api/v1/service/stop"))
.header("Authorization", format!("Bearer {}", api_key)) .header("Authorization", format!("Bearer {api_key}"))
.send() .send()
.await .await
.map_err(|e| format!("Failed to send request: {}", e))?; .map_err(|e| format!("Failed to send request: {e}"))?;
if response.status().is_success() { if response.status().is_success() {
Ok(true) Ok(true)
} else { } else {

View File

@@ -26,16 +26,16 @@ impl MergedSettings {
} }
} }
fn get_data_config_path() -> Result<PathBuf, String> { pub fn get_data_config_path() -> Result<PathBuf, String> {
let app_dir = std::env::current_exe() let app_dir = std::env::current_exe()
.map_err(|e| format!("Failed to get current exe path: {}", e))? .map_err(|e| format!("Failed to get current exe path: {e}"))?
.parent() .parent()
.ok_or("Failed to get parent directory")? .ok_or("Failed to get parent directory")?
.to_path_buf(); .to_path_buf();
Ok(app_dir.join("data").join("config.json")) Ok(app_dir.join("data").join("config.json"))
} }
fn read_data_config() -> Result<serde_json::Value, String> { pub fn read_data_config() -> Result<serde_json::Value, String> {
let path = Self::get_data_config_path()?; let path = Self::get_data_config_path()?;
if !path.exists() { if !path.exists() {
return Err("data/config.json does not exist".to_string()); return Err("data/config.json does not exist".to_string());
@@ -78,12 +78,10 @@ impl MergedSettings {
serde_json::from_str(&config).map_err(|e| e.to_string())? serde_json::from_str(&config).map_err(|e| e.to_string())?
}; };
if let Ok(data_port) = Self::get_port_from_data_config() { if let Ok(Some(port)) = Self::get_port_from_data_config() {
if let Some(port) = data_port { if merged_settings.openlist.port != port {
if merged_settings.openlist.port != port { merged_settings.openlist.port = port;
merged_settings.openlist.port = port; merged_settings.save()?;
merged_settings.save()?;
}
} }
} }
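The settings hunk above collapses the nested `Ok(..)`/`Some(..)` checks into a single `if let Ok(Some(port))` when syncing the port from data/config.json. A toy version of that shape; values and the persistence step are illustrative:

```rust
// Nested-pattern collapse: one `if let Ok(Some(port))` replaces two levels.
fn get_port_from_data_config() -> Result<Option<u16>, String> {
    Ok(Some(5244)) // illustrative value; the real code reads data/config.json
}

fn sync_port(current: &mut u16) -> bool {
    if let Ok(Some(port)) = get_port_from_data_config() {
        if *current != port {
            *current = port;
            return true; // the real code persists the merged settings here
        }
    }
    false
}
```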

View File

@@ -31,8 +31,7 @@ pub async fn install_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(true) Ok(true)
} else { } else {
Err(Box::from(format!( Err(Box::from(format!(
"Failed to install service, exit status: {}", "Failed to install service, exit status: {status}"
status
))) )))
} }
} }
@@ -47,7 +46,7 @@ pub async fn uninstall_service() -> Result<bool, Box<dyn std::error::Error>> {
let uninstall_path = app_dir.join("uninstall-openlist-service.exe"); let uninstall_path = app_dir.join("uninstall-openlist-service.exe");
if !uninstall_path.exists() { if !uninstall_path.exists() {
error!("Uninstaller not found: {:?}", uninstall_path); error!("Uninstaller not found: {uninstall_path:?}");
return Err(Box::from(format!( return Err(Box::from(format!(
"uninstaller not found: {uninstall_path:?}" "uninstaller not found: {uninstall_path:?}"
))); )));
@@ -66,8 +65,7 @@ pub async fn uninstall_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(true) Ok(true)
} else { } else {
Err(Box::from(format!( Err(Box::from(format!(
"Failed to uninstall service, exit status: {}", "Failed to uninstall service, exit status: {status}"
status
))) )))
} }
} }
@@ -102,8 +100,7 @@ pub async fn install_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(true) Ok(true)
} else { } else {
Err(Box::from(format!( Err(Box::from(format!(
"Failed to install service, exit status: {}", "Failed to install service, exit status: {status}"
status
))) )))
} }
} }
@@ -116,10 +113,9 @@ pub async fn uninstall_service() -> Result<bool, Box<dyn std::error::Error>> {
let uninstall_path = app_dir.join("uninstall-openlist-service"); let uninstall_path = app_dir.join("uninstall-openlist-service");
if !uninstall_path.exists() { if !uninstall_path.exists() {
error!("Uninstaller not found: {:?}", uninstall_path); error!("Uninstaller not found: {uninstall_path:?}");
return Err(Box::from(format!( return Err(Box::from(format!(
"Uninstaller not found: {:?}", "Uninstaller not found: {uninstall_path:?}"
uninstall_path
))); )));
} }
@@ -139,8 +135,7 @@ pub async fn uninstall_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(true) Ok(true)
} else { } else {
Err(Box::from(format!( Err(Box::from(format!(
"Failed to uninstall service, exit status: {}", "Failed to uninstall service, exit status: {status}"
status
))) )))
} }
} }
@@ -172,7 +167,7 @@ fn start_service_with_elevation(service_name: &str) -> Result<bool, Box<dyn std:
let token = Token::with_current_process()?; let token = Token::with_current_process()?;
let level = token.privilege_level()?; let level = token.privilege_level()?;
let powershell_cmd = format!("Start-Service -Name '{}'", service_name); let powershell_cmd = format!("Start-Service -Name '{service_name}'");
let status = match level { let status = match level {
PrivilegeLevel::NotPrivileged => { PrivilegeLevel::NotPrivileged => {
@@ -185,7 +180,7 @@ fn start_service_with_elevation(service_name: &str) -> Result<bool, Box<dyn std:
_ => { _ => {
log::info!("Already have admin privileges, running directly"); log::info!("Already have admin privileges, running directly");
StdCommand::new("powershell.exe") StdCommand::new("powershell.exe")
.args(&["-Command", &powershell_cmd]) .args(["-Command", &powershell_cmd])
.creation_flags(0x08000000) .creation_flags(0x08000000)
.status()? .status()?
} }
@@ -195,10 +190,7 @@ fn start_service_with_elevation(service_name: &str) -> Result<bool, Box<dyn std:
log::info!("Service started successfully via PowerShell"); log::info!("Service started successfully via PowerShell");
Ok(true) Ok(true)
} else { } else {
log::error!( log::error!("Failed to start service via PowerShell, exit code: {status}");
"Failed to start service via PowerShell, exit code: {}",
status
);
Ok(false) Ok(false)
} }
} }
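`Command::args` accepts any `IntoIterator` of `AsRef<OsStr>` items, so the array literal no longer needs the extra borrow clippy flagged (`.args(&[..])` becomes `.args([..])`). A sketch of the already-elevated branch above, leaving out the Windows-only `creation_flags` call:

```rust
// Array-literal args without the needless borrow; service name is illustrative.
use std::process::Command;

fn start_service_cmd(service_name: &str) -> std::io::Result<bool> {
    let powershell_cmd = format!("Start-Service -Name '{service_name}'");
    let status = Command::new("powershell.exe")
        .args(["-Command", &powershell_cmd])
        .status()?;
    Ok(status.success())
}
```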
@@ -219,7 +211,7 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
let service = match manager.open_service(service_name, ServiceAccess::QUERY_STATUS) { let service = match manager.open_service(service_name, ServiceAccess::QUERY_STATUS) {
Ok(svc) => svc, Ok(svc) => svc,
Err(e) => { Err(e) => {
log::error!("Failed to open service '{}': {:?}", service_name, e); log::error!("Failed to open service '{service_name}': {e:?}");
return Ok(false); return Ok(false);
} }
}; };
@@ -247,13 +239,13 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(true) => Ok(true), Ok(true) => Ok(true),
Ok(false) => Ok(false), Ok(false) => Ok(false),
Err(e) => { Err(e) => {
log::error!("Error during service elevation: {:?}", e); log::error!("Error during service elevation: {e:?}");
Ok(false) Ok(false)
} }
} }
} }
Err(e) => { Err(e) => {
log::error!("Failed to query service status: {:?}", e); log::error!("Failed to query service status: {e:?}");
match start_service_with_elevation(service_name) { match start_service_with_elevation(service_name) {
Ok(true) => Ok(true), Ok(true) => Ok(true),
Ok(false) => { Ok(false) => {
@@ -261,7 +253,7 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(false) Ok(false)
} }
Err(elev_err) => { Err(elev_err) => {
log::error!("Error during service elevation: {:?}", elev_err); log::error!("Error during service elevation: {elev_err:?}");
Ok(false) Ok(false)
} }
} }
@@ -273,7 +265,7 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> { pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
const SERVICE_NAME: &str = "openlist-desktop-service"; const SERVICE_NAME: &str = "openlist-desktop-service";
log::info!("Checking Linux service status for: {}", SERVICE_NAME); log::info!("Checking Linux service status for: {SERVICE_NAME}");
let init_system = detect_linux_init_system(); let init_system = detect_linux_init_system();
@@ -281,7 +273,7 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
"systemd" => start_systemd_service_with_check(SERVICE_NAME).await, "systemd" => start_systemd_service_with_check(SERVICE_NAME).await,
"openrc" => start_openrc_service_with_check(SERVICE_NAME).await, "openrc" => start_openrc_service_with_check(SERVICE_NAME).await,
_ => { _ => {
log::warn!("Unknown init system: {}, assuming systemd", init_system); log::warn!("Unknown init system: {init_system}, assuming systemd");
start_systemd_service_with_check(SERVICE_NAME).await start_systemd_service_with_check(SERVICE_NAME).await
} }
} }
@@ -291,10 +283,10 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
async fn start_systemd_service_with_check( async fn start_systemd_service_with_check(
service_name: &str, service_name: &str,
) -> Result<bool, Box<dyn std::error::Error>> { ) -> Result<bool, Box<dyn std::error::Error>> {
log::info!("Checking systemd service status for: {}", service_name); log::info!("Checking systemd service status for: {service_name}");
let status_output = StdCommand::new("systemctl") let status_output = StdCommand::new("systemctl")
.args(&["is-active", service_name]) .args(["is-active", service_name])
.output(); .output();
match status_output { match status_output {
@@ -302,21 +294,21 @@ async fn start_systemd_service_with_check(
let status = String::from_utf8_lossy(&output.stdout) let status = String::from_utf8_lossy(&output.stdout)
.trim() .trim()
.to_lowercase(); .to_lowercase();
log::info!("Service {} status: {}", service_name, status); log::info!("Service {service_name} status: {status}");
match status.as_str() { match status.as_str() {
"active" | "activating" => { "active" | "activating" => {
log::info!("Service is active and running"); log::info!("Service is active and running");
return Ok(true); Ok(true)
} }
"inactive" | "failed" => { "inactive" | "failed" => {
log::info!("Service is {}, attempting to start", status); log::info!("Service is {status}, attempting to start");
return start_systemd_service(service_name).await; return start_systemd_service(service_name).await;
} }
"unknown" => { "unknown" => {
log::warn!("Service status unknown, checking if service exists"); log::warn!("Service status unknown, checking if service exists");
let exists_output = StdCommand::new("systemctl") let exists_output = StdCommand::new("systemctl")
.args(&["list-unit-files", &format!("{}.service", service_name)]) .args(["list-unit-files", &format!("{service_name}.service")])
.output(); .output();
match exists_output { match exists_output {
@@ -326,24 +318,24 @@ async fn start_systemd_service_with_check(
log::info!("Service exists but not active, attempting to start"); log::info!("Service exists but not active, attempting to start");
return start_systemd_service(service_name).await; return start_systemd_service(service_name).await;
} else { } else {
log::error!("Service {} not found", service_name); log::error!("Service {service_name} not found");
return Ok(false); Ok(false)
} }
} }
_ => { _ => {
log::error!("Failed to check if service exists"); log::error!("Failed to check if service exists");
return Ok(false); Ok(false)
} }
} }
} }
_ => { _ => {
log::warn!("Unknown service status: {}, attempting to start", status); log::warn!("Unknown service status: {status}, attempting to start");
return start_systemd_service(service_name).await; return start_systemd_service(service_name).await;
} }
} }
} }
Err(e) => { Err(e) => {
log::error!("Failed to check systemd service status: {}", e); log::error!("Failed to check systemd service status: {e}");
return start_systemd_service(service_name).await; return start_systemd_service(service_name).await;
} }
} }
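The systemd branch above shells out to `systemctl is-active`, normalizes stdout, and branches on the reported state before deciding whether to start the unit. A trimmed sketch of that probe, without the fallback start logic:

```rust
// Trimmed systemd status probe: None on spawn failure, Some(active?) otherwise.
use std::process::Command;

fn systemd_is_active(service_name: &str) -> Option<bool> {
    let output = Command::new("systemctl")
        .args(["is-active", service_name])
        .output()
        .ok()?;
    let status = String::from_utf8_lossy(&output.stdout).trim().to_lowercase();
    Some(matches!(status.as_str(), "active" | "activating"))
}
```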
@@ -353,10 +345,10 @@ async fn start_systemd_service_with_check(
async fn start_openrc_service_with_check( async fn start_openrc_service_with_check(
service_name: &str, service_name: &str,
) -> Result<bool, Box<dyn std::error::Error>> { ) -> Result<bool, Box<dyn std::error::Error>> {
log::info!("Checking OpenRC service status for: {}", service_name); log::info!("Checking OpenRC service status for: {service_name}");
let status_output = StdCommand::new("rc-service") let status_output = StdCommand::new("rc-service")
.args(&[service_name, "status"]) .args([service_name, "status"])
.output(); .output();
match status_output { match status_output {
@@ -364,16 +356,16 @@ async fn start_openrc_service_with_check(
let status_str = String::from_utf8_lossy(&output.stdout).to_lowercase(); let status_str = String::from_utf8_lossy(&output.stdout).to_lowercase();
let stderr_str = String::from_utf8_lossy(&output.stderr).to_lowercase(); let stderr_str = String::from_utf8_lossy(&output.stderr).to_lowercase();
log::info!("OpenRC service status output: {}", status_str); log::info!("OpenRC service status output: {status_str}");
if status_str.contains("started") || status_str.contains("running") { if status_str.contains("started") || status_str.contains("running") {
log::info!("Service is running"); log::info!("Service is running");
return Ok(true); Ok(true)
} else if status_str.contains("stopped") || status_str.contains("inactive") { } else if status_str.contains("stopped") || status_str.contains("inactive") {
log::info!("Service is stopped, attempting to start"); log::info!("Service is stopped, attempting to start");
return start_openrc_service(service_name).await; return start_openrc_service(service_name).await;
} else if stderr_str.contains("does not exist") { } else if stderr_str.contains("does not exist") {
log::error!("Service {} does not exist", service_name); log::error!("Service {service_name} does not exist");
return Ok(false); return Ok(false);
} else { } else {
log::warn!("Unknown service status, attempting to start"); log::warn!("Unknown service status, attempting to start");
@@ -381,7 +373,7 @@ async fn start_openrc_service_with_check(
} }
} }
Err(e) => { Err(e) => {
log::error!("Failed to check OpenRC service status: {}", e); log::error!("Failed to check OpenRC service status: {e}");
return start_openrc_service(service_name).await; return start_openrc_service(service_name).await;
} }
} }
@@ -406,8 +398,7 @@ pub async fn install_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(true) Ok(true)
} else { } else {
-        Err(Box::from(format!(
-            "Failed to install service, exit status: {}",
-            status
-        )))
+        Err(Box::from(format!(
+            "Failed to install service, exit status: {status}"
+        )))
} }
} }
@@ -418,10 +409,9 @@ pub async fn uninstall_service() -> Result<bool, Box<dyn std::error::Error>> {
let uninstall_path = app_dir.join("uninstall-openlist-service"); let uninstall_path = app_dir.join("uninstall-openlist-service");
if !uninstall_path.exists() { if !uninstall_path.exists() {
error!("Uninstaller not found: {:?}", uninstall_path); error!("Uninstaller not found: {uninstall_path:?}");
-        return Err(Box::from(format!(
-            "Uninstaller not found: {:?}",
-            uninstall_path
-        )));
+        return Err(Box::from(format!(
+            "Uninstaller not found: {uninstall_path:?}"
+        )));
} }
let status = StdCommand::new(&uninstall_path).status()?; let status = StdCommand::new(&uninstall_path).status()?;
@@ -430,8 +420,7 @@ pub async fn uninstall_service() -> Result<bool, Box<dyn std::error::Error>> {
Ok(true) Ok(true)
} else { } else {
-        Err(Box::from(format!(
-            "Failed to uninstall service, exit status: {}",
-            status
-        )))
+        Err(Box::from(format!(
+            "Failed to uninstall service, exit status: {status}"
+        )))
} }
} }
@@ -452,26 +441,24 @@ pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>
let service = match manager.open_service(service_name, ServiceAccess::QUERY_STATUS) { let service = match manager.open_service(service_name, ServiceAccess::QUERY_STATUS) {
Ok(svc) => svc, Ok(svc) => svc,
Err(e) => { Err(e) => {
log::error!("Failed to open service '{}': {:?}", service_name, e); log::error!("Failed to open service '{service_name}': {e:?}");
return Ok("not-installed".to_string()); return Ok("not-installed".to_string());
} }
}; };
match service.query_status() { match service.query_status() {
Ok(status) => match status.current_state { Ok(status) => match status.current_state {
-            ServiceState::Running | ServiceState::StartPending => {
-                return Ok("running".to_string());
-            }
+            ServiceState::Running | ServiceState::StartPending => Ok("running".to_string()),
ServiceState::StopPending => { ServiceState::StopPending => {
std::thread::sleep(std::time::Duration::from_millis(1000)); std::thread::sleep(std::time::Duration::from_millis(1000));
return Ok("stopped".to_string()); Ok("stopped".to_string())
} }
_ => { _ => {
log::info!("Service is in state: {:?}.", status.current_state); log::info!("Service is in state: {:?}.", status.current_state);
return Ok("stopped".to_string()); Ok("stopped".to_string())
} }
}, },
Err(e) => { Err(e) => {
log::error!("Failed to query service status: {:?}", e); log::error!("Failed to query service status: {e:?}");
match start_service_with_elevation(service_name) { match start_service_with_elevation(service_name) {
Ok(true) => Ok("running".to_string()), Ok(true) => Ok("running".to_string()),
Ok(false) => { Ok(false) => {
@@ -479,7 +466,7 @@ pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>
Ok("stopped".to_string()) Ok("stopped".to_string())
} }
Err(elev_err) => { Err(elev_err) => {
log::error!("Error during service elevation: {:?}", elev_err); log::error!("Error during service elevation: {elev_err:?}");
Ok("error".to_string()) Ok("error".to_string())
} }
} }
@@ -491,7 +478,7 @@ pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>
pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>> { pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>> {
const SERVICE_NAME: &str = "openlist-desktop-service"; const SERVICE_NAME: &str = "openlist-desktop-service";
log::info!("Checking Linux service status for: {}", SERVICE_NAME); log::info!("Checking Linux service status for: {SERVICE_NAME}");
let init_system = detect_linux_init_system(); let init_system = detect_linux_init_system();
@@ -499,7 +486,7 @@ pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>
"systemd" => check_systemd_service_status(SERVICE_NAME).await, "systemd" => check_systemd_service_status(SERVICE_NAME).await,
"openrc" => check_openrc_service_status(SERVICE_NAME).await, "openrc" => check_openrc_service_status(SERVICE_NAME).await,
_ => { _ => {
log::warn!("Unknown init system: {}, assuming systemd", init_system); log::warn!("Unknown init system: {init_system}, assuming systemd");
check_systemd_service_status(SERVICE_NAME).await check_systemd_service_status(SERVICE_NAME).await
} }
} }
@@ -534,10 +521,10 @@ fn detect_linux_init_system() -> String {
async fn check_systemd_service_status( async fn check_systemd_service_status(
service_name: &str, service_name: &str,
) -> Result<String, Box<dyn std::error::Error>> { ) -> Result<String, Box<dyn std::error::Error>> {
log::info!("Checking systemd service status for: {}", service_name); log::info!("Checking systemd service status for: {service_name}");
let status_output = StdCommand::new("systemctl") let status_output = StdCommand::new("systemctl")
.args(&["is-active", service_name]) .args(["is-active", service_name])
.output(); .output();
match status_output { match status_output {
@@ -545,21 +532,21 @@ async fn check_systemd_service_status(
let status = String::from_utf8_lossy(&output.stdout) let status = String::from_utf8_lossy(&output.stdout)
.trim() .trim()
.to_lowercase(); .to_lowercase();
log::info!("Service {} status: {}", service_name, status); log::info!("Service {service_name} status: {status}");
match status.as_str() { match status.as_str() {
"active" | "activating" => { "active" | "activating" => {
log::info!("Service is active and running"); log::info!("Service is active and running");
return Ok("running".to_string()); Ok("running".to_string())
} }
"inactive" | "failed" => { "inactive" | "failed" => {
log::info!("Service is {}", status); log::info!("Service is {status}");
return Ok("stopped".to_string()); Ok("stopped".to_string())
} }
"unknown" => { "unknown" => {
log::warn!("Service status unknown, checking if service exists"); log::warn!("Service status unknown, checking if service exists");
let exists_output = StdCommand::new("systemctl") let exists_output = StdCommand::new("systemctl")
.args(&["list-unit-files", &format!("{}.service", service_name)]) .args(["list-unit-files", &format!("{service_name}.service")])
.output(); .output();
match exists_output { match exists_output {
@@ -567,27 +554,27 @@ async fn check_systemd_service_status(
let output_str = String::from_utf8_lossy(&output.stdout); let output_str = String::from_utf8_lossy(&output.stdout);
if output_str.contains(service_name) { if output_str.contains(service_name) {
log::info!("Service exists and not active"); log::info!("Service exists and not active");
return Ok("stopped".to_string()); Ok("stopped".to_string())
} else { } else {
log::error!("Service {} not found", service_name); log::error!("Service {service_name} not found");
return Ok("not-installed".to_string()); Ok("not-installed".to_string())
} }
} }
_ => { _ => {
log::error!("Failed to check if service exists"); log::error!("Failed to check if service exists");
return Ok("error".to_string()); Ok("error".to_string())
} }
} }
} }
_ => { _ => {
log::warn!("Unknown service status: {}", status); log::warn!("Unknown service status: {status}");
return Ok("error".to_string()); Ok("error".to_string())
} }
} }
} }
Err(e) => { Err(e) => {
log::error!("Failed to check systemd service status: {}", e); log::error!("Failed to check systemd service status: {e}");
return Ok("error".to_string()); Ok("error".to_string())
} }
} }
} }
@@ -596,18 +583,18 @@ async fn check_systemd_service_status(
async fn start_systemd_service(service_name: &str) -> Result<bool, Box<dyn std::error::Error>> { async fn start_systemd_service(service_name: &str) -> Result<bool, Box<dyn std::error::Error>> {
use users::get_effective_uid; use users::get_effective_uid;
log::info!("Attempting to start systemd service: {}", service_name); log::info!("Attempting to start systemd service: {service_name}");
let status = match get_effective_uid() { let status = match get_effective_uid() {
0 => StdCommand::new("systemctl") 0 => StdCommand::new("systemctl")
.args(&["start", service_name]) .args(["start", service_name])
.status()?, .status()?,
_ => { _ => {
let elevator = linux_elevator(); let elevator = linux_elevator();
log::info!("Using {} for elevation", elevator); log::info!("Using {elevator} for elevation");
StdCommand::new(&elevator) StdCommand::new(&elevator)
.args(&["systemctl", "start", service_name]) .args(["systemctl", "start", service_name])
.status()? .status()?
} }
}; };
@@ -617,7 +604,7 @@ async fn start_systemd_service(service_name: &str) -> Result<bool, Box<dyn std::
std::thread::sleep(std::time::Duration::from_millis(1000)); std::thread::sleep(std::time::Duration::from_millis(1000));
let verify_output = StdCommand::new("systemctl") let verify_output = StdCommand::new("systemctl")
.args(&["is-active", service_name]) .args(["is-active", service_name])
.output()?; .output()?;
let verify_status_str = String::from_utf8_lossy(&verify_output.stdout); let verify_status_str = String::from_utf8_lossy(&verify_output.stdout);
@@ -628,14 +615,13 @@ async fn start_systemd_service(service_name: &str) -> Result<bool, Box<dyn std::
log::info!("Service verified as running"); log::info!("Service verified as running");
} else { } else {
-            log::warn!(
-                "Service start command succeeded but service is not active: {}",
-                verify_status
-            );
+            log::warn!(
+                "Service start command succeeded but service is not active: {verify_status}"
+            );
} }
Ok(is_running) Ok(is_running)
} else { } else {
log::error!("Failed to start service, exit code: {}", status); log::error!("Failed to start service, exit code: {status}");
Ok(false) Ok(false)
} }
} }
@@ -644,10 +630,10 @@ async fn start_systemd_service(service_name: &str) -> Result<bool, Box<dyn std::
async fn check_openrc_service_status( async fn check_openrc_service_status(
service_name: &str, service_name: &str,
) -> Result<String, Box<dyn std::error::Error>> { ) -> Result<String, Box<dyn std::error::Error>> {
log::info!("Checking OpenRC service status for: {}", service_name); log::info!("Checking OpenRC service status for: {service_name}");
let status_output = StdCommand::new("rc-service") let status_output = StdCommand::new("rc-service")
.args(&[service_name, "status"]) .args([service_name, "status"])
.output(); .output();
match status_output { match status_output {
@@ -655,25 +641,25 @@ async fn check_openrc_service_status(
let status_str = String::from_utf8_lossy(&output.stdout).to_lowercase(); let status_str = String::from_utf8_lossy(&output.stdout).to_lowercase();
let stderr_str = String::from_utf8_lossy(&output.stderr).to_lowercase(); let stderr_str = String::from_utf8_lossy(&output.stderr).to_lowercase();
log::info!("OpenRC service status output: {}", status_str); log::info!("OpenRC service status output: {status_str}");
if status_str.contains("started") || status_str.contains("running") { if status_str.contains("started") || status_str.contains("running") {
log::info!("Service is running"); log::info!("Service is running");
return Ok("running".to_string()); Ok("running".to_string())
} else if status_str.contains("stopped") || status_str.contains("inactive") { } else if status_str.contains("stopped") || status_str.contains("inactive") {
log::info!("Service is stopped"); log::info!("Service is stopped");
return Ok("stopped".to_string()); Ok("stopped".to_string())
} else if stderr_str.contains("does not exist") { } else if stderr_str.contains("does not exist") {
log::error!("Service {} does not exist", service_name); log::error!("Service {service_name} does not exist");
return Ok("not-installed".to_string()); Ok("not-installed".to_string())
} else { } else {
log::warn!("Unknown service status, attempting to start"); log::warn!("Unknown service status, attempting to start");
return Ok("error".to_string()); Ok("error".to_string())
} }
} }
Err(e) => { Err(e) => {
log::error!("Failed to check OpenRC service status: {}", e); log::error!("Failed to check OpenRC service status: {e}");
return Ok("error".to_string()); Ok("error".to_string())
} }
} }
} }
@@ -682,17 +668,17 @@ async fn check_openrc_service_status(
async fn start_openrc_service(service_name: &str) -> Result<bool, Box<dyn std::error::Error>> { async fn start_openrc_service(service_name: &str) -> Result<bool, Box<dyn std::error::Error>> {
use users::get_effective_uid; use users::get_effective_uid;
log::info!("Attempting to start OpenRC service: {}", service_name); log::info!("Attempting to start OpenRC service: {service_name}");
let status = match get_effective_uid() { let status = match get_effective_uid() {
0 => StdCommand::new("rc-service") 0 => StdCommand::new("rc-service")
.args(&[service_name, "start"]) .args([service_name, "start"])
.status()?, .status()?,
_ => { _ => {
let elevator = linux_elevator(); let elevator = linux_elevator();
log::info!("Using {} for elevation", elevator); log::info!("Using {elevator} for elevation");
StdCommand::new(&elevator) StdCommand::new(&elevator)
.args(&["rc-service", service_name, "start"]) .args(["rc-service", service_name, "start"])
.status()? .status()?
} }
}; };
@@ -702,7 +688,7 @@ async fn start_openrc_service(service_name: &str) -> Result<bool, Box<dyn std::e
std::thread::sleep(std::time::Duration::from_millis(1000)); std::thread::sleep(std::time::Duration::from_millis(1000));
let verify_output = StdCommand::new("rc-service") let verify_output = StdCommand::new("rc-service")
.args(&[service_name, "status"]) .args([service_name, "status"])
.output()?; .output()?;
let verify_status = String::from_utf8_lossy(&verify_output.stdout).to_lowercase(); let verify_status = String::from_utf8_lossy(&verify_output.stdout).to_lowercase();
@@ -712,14 +698,13 @@ async fn start_openrc_service(service_name: &str) -> Result<bool, Box<dyn std::e
log::info!("Service verified as running"); log::info!("Service verified as running");
} else { } else {
-            log::warn!(
-                "Service start command succeeded but service is not running: {}",
-                verify_status
-            );
+            log::warn!(
+                "Service start command succeeded but service is not running: {verify_status}"
+            );
} }
Ok(is_running) Ok(is_running)
} else { } else {
log::error!("Failed to start OpenRC service, exit code: {}", status); log::error!("Failed to start OpenRC service, exit code: {status}");
Ok(false) Ok(false)
} }
} }
@@ -728,23 +713,23 @@ async fn start_openrc_service(service_name: &str) -> Result<bool, Box<dyn std::e
pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> { pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
const SERVICE_IDENTIFIER: &str = "io.github.openlistteam.openlist.service"; const SERVICE_IDENTIFIER: &str = "io.github.openlistteam.openlist.service";
log::info!("Checking macOS service status for: {}", SERVICE_IDENTIFIER); log::info!("Checking macOS service status for: {SERVICE_IDENTIFIER}");
let status_output = StdCommand::new("launchctl") let status_output = StdCommand::new("launchctl")
.args(&["list", SERVICE_IDENTIFIER]) .args(["list", SERVICE_IDENTIFIER])
.output(); .output();
match status_output { match status_output {
Ok(output) => { Ok(output) => {
if output.status.success() { if output.status.success() {
let output_str = String::from_utf8_lossy(&output.stdout); let output_str = String::from_utf8_lossy(&output.stdout);
log::info!("launchctl list output: {}", output_str); log::info!("launchctl list output: {output_str}");
if let Some(pid_value) = extract_plist_value(&output_str, "PID") { if let Some(pid_value) = extract_plist_value(&output_str, "PID") {
log::info!("Extracted PID value: {}", pid_value); log::info!("Extracted PID value: {pid_value}");
if let Ok(pid) = pid_value.parse::<i32>() { if let Ok(pid) = pid_value.parse::<i32>() {
if pid > 0 { if pid > 0 {
log::info!("Service is running with PID: {}", pid); log::info!("Service is running with PID: {pid}");
return Ok(true); return Ok(true);
} }
} }
@@ -759,10 +744,9 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
return start_macos_service(SERVICE_IDENTIFIER).await; return start_macos_service(SERVICE_IDENTIFIER).await;
} else { } else {
-                        log::warn!(
-                            "Service has non-zero exit status: {}, attempting to restart",
-                            status
-                        );
+                        log::warn!(
+                            "Service has non-zero exit status: {status}, attempting to restart"
+                        );
return restart_macos_service(SERVICE_IDENTIFIER).await; return start_macos_service(SERVICE_IDENTIFIER).await;
} }
} }
} }
@@ -772,8 +756,8 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
} else { } else {
let stderr_str = String::from_utf8_lossy(&output.stderr); let stderr_str = String::from_utf8_lossy(&output.stderr);
if stderr_str.contains("Could not find service") { if stderr_str.contains("Could not find service") {
log::error!("Service {} is not loaded", SERVICE_IDENTIFIER); log::error!("Service {SERVICE_IDENTIFIER} is not loaded");
return Ok(false); Ok(false)
} else { } else {
log::warn!("launchctl list failed, attempting to start service anyway"); log::warn!("launchctl list failed, attempting to start service anyway");
return start_macos_service(SERVICE_IDENTIFIER).await; return start_macos_service(SERVICE_IDENTIFIER).await;
@@ -781,7 +765,7 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
} }
} }
Err(e) => { Err(e) => {
log::error!("Failed to check macOS service status: {}", e); log::error!("Failed to check macOS service status: {e}");
return start_macos_service(SERVICE_IDENTIFIER).await; return start_macos_service(SERVICE_IDENTIFIER).await;
} }
} }
@@ -791,23 +775,23 @@ pub async fn start_service() -> Result<bool, Box<dyn std::error::Error>> {
pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>> { pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>> {
const SERVICE_IDENTIFIER: &str = "io.github.openlistteam.openlist.service"; const SERVICE_IDENTIFIER: &str = "io.github.openlistteam.openlist.service";
log::info!("Checking macOS service status for: {}", SERVICE_IDENTIFIER); log::info!("Checking macOS service status for: {SERVICE_IDENTIFIER}");
let status_output = StdCommand::new("launchctl") let status_output = StdCommand::new("launchctl")
.args(&["list", SERVICE_IDENTIFIER]) .args(["list", SERVICE_IDENTIFIER])
.output(); .output();
match status_output { match status_output {
Ok(output) => { Ok(output) => {
if output.status.success() { if output.status.success() {
let output_str = String::from_utf8_lossy(&output.stdout); let output_str = String::from_utf8_lossy(&output.stdout);
log::info!("launchctl list output: {}", output_str); log::info!("launchctl list output: {output_str}");
if let Some(pid_value) = extract_plist_value(&output_str, "PID") { if let Some(pid_value) = extract_plist_value(&output_str, "PID") {
log::info!("Extracted PID value: {}", pid_value); log::info!("Extracted PID value: {pid_value}");
if let Ok(pid) = pid_value.parse::<i32>() { if let Ok(pid) = pid_value.parse::<i32>() {
if pid > 0 { if pid > 0 {
log::info!("Service is running with PID: {}", pid); log::info!("Service is running with PID: {pid}");
return Ok("running".to_string()); return Ok("running".to_string());
} }
} }
@@ -819,38 +803,38 @@ pub async fn check_service_status() -> Result<String, Box<dyn std::error::Error>
log::info!("Service is loaded but not running (clean exit)"); log::info!("Service is loaded but not running (clean exit)");
return Ok("stopped".to_string()); return Ok("stopped".to_string());
} else { } else {
log::warn!("Service has non-zero exit status: {}", status); log::warn!("Service has non-zero exit status: {status}");
return Ok("stopped".to_string()); return Ok("stopped".to_string());
} }
} }
} }
log::info!("Service appears to be loaded but status unclear"); log::info!("Service appears to be loaded but status unclear");
return Ok("error".to_string()); Ok("error".to_string())
} else { } else {
let stderr_str = String::from_utf8_lossy(&output.stderr); let stderr_str = String::from_utf8_lossy(&output.stderr);
if stderr_str.contains("Could not find service") { if stderr_str.contains("Could not find service") {
log::error!("Service {} is not loaded", SERVICE_IDENTIFIER); log::error!("Service {SERVICE_IDENTIFIER} is not loaded");
return Ok("not-installed".to_string()); Ok("not-installed".to_string())
} else { } else {
log::warn!("launchctl list failed"); log::warn!("launchctl list failed");
return Ok("error".to_string()); Ok("error".to_string())
} }
} }
} }
Err(e) => { Err(e) => {
log::error!("Failed to check macOS service status: {}", e); log::error!("Failed to check macOS service status: {e}");
return Ok("error".to_string()); Ok("error".to_string())
} }
} }
} }
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
async fn start_macos_service(service_identifier: &str) -> Result<bool, Box<dyn std::error::Error>> { async fn start_macos_service(service_identifier: &str) -> Result<bool, Box<dyn std::error::Error>> {
log::info!("Attempting to start macOS service: {}", service_identifier); log::info!("Attempting to start macOS service: {service_identifier}");
let status = StdCommand::new("launchctl") let status = StdCommand::new("launchctl")
.args(&["start", service_identifier]) .args(["start", service_identifier])
.status()?; .status()?;
if status.success() { if status.success() {
@@ -858,20 +842,20 @@ async fn start_macos_service(service_identifier: &str) -> Result<bool, Box<dyn s
std::thread::sleep(std::time::Duration::from_millis(2000)); std::thread::sleep(std::time::Duration::from_millis(2000));
let verify_output = StdCommand::new("launchctl") let verify_output = StdCommand::new("launchctl")
.args(&["list", service_identifier]) .args(["list", service_identifier])
.output()?; .output()?;
if verify_output.status.success() { if verify_output.status.success() {
let output_str = String::from_utf8_lossy(&verify_output.stdout); let output_str = String::from_utf8_lossy(&verify_output.stdout);
log::info!("Verification output: {}", output_str); log::info!("Verification output: {output_str}");
if let Some(pid_value) = extract_plist_value(&output_str, "PID") { if let Some(pid_value) = extract_plist_value(&output_str, "PID") {
if let Ok(pid) = pid_value.parse::<i32>() { if let Ok(pid) = pid_value.parse::<i32>() {
if pid > 0 { if pid > 0 {
log::info!("Service verified as running with PID: {}", pid); log::info!("Service verified as running with PID: {pid}");
return Ok(true); return Ok(true);
} else { } else {
log::warn!("Service has invalid PID: {}", pid); log::warn!("Service has invalid PID: {pid}");
return Ok(false); return Ok(false);
} }
} }
@@ -886,14 +870,14 @@ async fn start_macos_service(service_identifier: &str) -> Result<bool, Box<dyn s
log::warn!("Could not verify service status after start"); log::warn!("Could not verify service status after start");
Ok(false) Ok(false)
} else { } else {
log::error!("Failed to start macOS service, exit code: {}", status); log::error!("Failed to start macOS service, exit code: {status}");
Ok(false) Ok(false)
} }
} }
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn extract_plist_value(plist_output: &str, key: &str) -> Option<String> { fn extract_plist_value(plist_output: &str, key: &str) -> Option<String> {
let pattern = format!("\"{}\"", key); let pattern = format!("\"{key}\"");
for line in plist_output.lines() { for line in plist_output.lines() {
let trimmed = line.trim(); let trimmed = line.trim();
@@ -902,8 +886,8 @@ fn extract_plist_value(plist_output: &str, key: &str) -> Option<String> {
let value_part = &trimmed[equals_pos + 1..]; let value_part = &trimmed[equals_pos + 1..];
let value_trimmed = value_part.trim(); let value_trimmed = value_part.trim();
let value_clean = if value_trimmed.ends_with(';') { let value_clean = if let Some(stripped) = value_trimmed.strip_suffix(';') {
&value_trimmed[..value_trimmed.len() - 1] stripped
} else { } else {
value_trimmed value_trimmed
}; };


@@ -8,7 +8,7 @@ mod tray;
mod utils; mod utils;
use cmd::binary::get_binary_version; use cmd::binary::get_binary_version;
use cmd::config::{load_settings, reset_settings, save_settings}; use cmd::config::{load_settings, reset_settings, save_settings, save_settings_with_update_port};
use cmd::custom_updater::{ use cmd::custom_updater::{
check_for_updates, download_update, get_current_version, install_update_and_restart, check_for_updates, download_update, get_current_version, install_update_and_restart,
is_auto_check_enabled, restart_app, set_auto_check_enabled, is_auto_check_enabled, restart_app, set_auto_check_enabled,
@@ -45,7 +45,7 @@ async fn update_tray_menu(
service_running: bool, service_running: bool,
) -> Result<(), String> { ) -> Result<(), String> {
tray::update_tray_menu(&app_handle, service_running) tray::update_tray_menu(&app_handle, service_running)
.map_err(|e| format!("Failed to update tray menu: {}", e)) .map_err(|e| format!("Failed to update tray menu: {e}"))
} }
#[tauri::command] #[tauri::command]
@@ -54,7 +54,7 @@ async fn update_tray_menu_delayed(
service_running: bool, service_running: bool,
) -> Result<(), String> { ) -> Result<(), String> {
tray::update_tray_menu_delayed(&app_handle, service_running) tray::update_tray_menu_delayed(&app_handle, service_running)
.map_err(|e| format!("Failed to update tray menu (delayed): {}", e)) .map_err(|e| format!("Failed to update tray menu (delayed): {e}"))
} }
#[tauri::command] #[tauri::command]
@@ -63,7 +63,7 @@ async fn force_update_tray_menu(
service_running: bool, service_running: bool,
) -> Result<(), String> { ) -> Result<(), String> {
tray::force_update_tray_menu(&app_handle, service_running) tray::force_update_tray_menu(&app_handle, service_running)
.map_err(|e| format!("Failed to force update tray menu: {}", e)) .map_err(|e| format!("Failed to force update tray menu: {e}"))
} }
fn setup_background_update_checker(app_handle: &tauri::AppHandle) { fn setup_background_update_checker(app_handle: &tauri::AppHandle) {
@@ -79,7 +79,7 @@ fn setup_background_update_checker(app_handle: &tauri::AppHandle) {
cmd::custom_updater::perform_background_update_check(app_handle_initial.clone()) cmd::custom_updater::perform_background_update_check(app_handle_initial.clone())
.await .await
{ {
log::debug!("Initial background update check failed: {}", e); log::debug!("Initial background update check failed: {e}");
} }
} }
_ => { _ => {
@@ -94,10 +94,10 @@ pub fn run() {
let app_state = AppState::new(); let app_state = AppState::new();
log::info!("Starting {}...", utils::path::APP_ID); log::info!("Starting {}...", utils::path::APP_ID);
-    unsafe {
-        #[cfg(target_os = "linux")]
-        std::env::set_var("WEBKIT_DISABLE_DMABUF_RENDERER", "1")
-    };
+    #[cfg(target_os = "linux")]
+    {
+        unsafe { std::env::set_var("WEBKIT_DISABLE_DMABUF_RENDERER", "1") };
+    }
tauri::Builder::default() tauri::Builder::default()
.plugin(tauri_plugin_single_instance::init(|app, _args, _cwd| { .plugin(tauri_plugin_single_instance::init(|app, _args, _cwd| {
@@ -142,6 +142,7 @@ pub fn run() {
open_folder, open_folder,
open_url, open_url,
save_settings, save_settings,
save_settings_with_update_port,
load_settings, load_settings,
reset_settings, reset_settings,
get_logs, get_logs,
@@ -174,20 +175,19 @@ pub fn run() {
utils::init_log::init_log()?; utils::init_log::init_log()?;
utils::path::get_app_config_dir()?; utils::path::get_app_config_dir()?;
let app_state = app.state::<AppState>(); let app_state = app.state::<AppState>();
if let Err(e) = app_state.init(&app_handle) { if let Err(e) = app_state.init(app_handle) {
log::error!("Failed to initialize app state: {}", e); log::error!("Failed to initialize app state: {e}");
-                return Err(Box::new(std::io::Error::new(
-                    std::io::ErrorKind::Other,
-                    format!("App state initialization failed: {}", e),
-                )));
+                return Err(Box::new(std::io::Error::other(format!(
+                    "App state initialization failed: {e}"
+                ))));
} }
if let Err(e) = tray::create_tray(&app_handle) { if let Err(e) = tray::create_tray(app_handle) {
log::error!("Failed to create system tray: {}", e); log::error!("Failed to create system tray: {e}");
} else { } else {
log::info!("System tray created successfully"); log::info!("System tray created successfully");
} }
setup_background_update_checker(&app_handle); setup_background_update_checker(app_handle);
if let Some(window) = app.get_webview_window("main") { if let Some(window) = app.get_webview_window("main") {
let app_handle_clone = app_handle.clone(); let app_handle_clone = app_handle.clone();


@@ -31,7 +31,7 @@ impl AppState {
Ok(()) Ok(())
} }
Err(e) => { Err(e) => {
log::warn!("Failed to load settings, using defaults: {}", e); log::warn!("Failed to load settings, using defaults: {e}");
let default_settings = MergedSettings::default(); let default_settings = MergedSettings::default();
let mut app_settings = self.app_settings.write(); let mut app_settings = self.app_settings.write();
*app_settings = Some(default_settings); *app_settings = Some(default_settings);


@@ -79,7 +79,7 @@ pub fn create_tray(app_handle: &AppHandle) -> tauri::Result<()> {
log::debug!("Mouse left tray icon area"); log::debug!("Mouse left tray icon area");
} }
_ => { _ => {
log::debug!("Other tray event: {:?}", event); log::debug!("Other tray event: {event:?}");
} }
} }
}) })
@@ -200,10 +200,7 @@ pub fn update_tray_menu(app_handle: &AppHandle, service_running: bool) -> tauri:
)?; )?;
tray.set_menu(Some(menu))?; tray.set_menu(Some(menu))?;
-        log::debug!(
-            "Tray menu updated with service_running: {}",
-            service_running
-        );
+        log::debug!("Tray menu updated with service_running: {service_running}");
} }
Ok(()) Ok(())
} }
@@ -213,23 +210,24 @@ pub fn update_tray_menu_delayed(
service_running: bool, service_running: bool,
) -> tauri::Result<()> { ) -> tauri::Result<()> {
let app_handle_clone = app_handle.clone(); let app_handle_clone = app_handle.clone();
println!("Scheduling delayed tray menu update...");
std::thread::spawn(move || { std::thread::spawn(move || {
std::thread::sleep(std::time::Duration::from_millis(3000)); std::thread::sleep(std::time::Duration::from_millis(3000));
if let Err(e) = update_tray_menu(&app_handle_clone, service_running) { if let Err(e) = update_tray_menu(&app_handle_clone, service_running) {
log::error!("Failed to update tray menu (delayed): {}", e); log::error!("Failed to update tray menu (delayed): {e}");
} }
}); });
Ok(()) Ok(())
} }
fn handle_core_action(app_handle: &AppHandle, action: &str) { fn handle_core_action(app_handle: &AppHandle, action: &str) {
log::info!("Handling core action from tray: {}", action); log::info!("Handling core action from tray: {action}");
if let Err(e) = app_handle.emit("tray-core-action", action) { if let Err(e) = app_handle.emit("tray-core-action", action) {
log::error!("Failed to emit tray core action event: {}", e); log::error!("Failed to emit tray core action event: {e}");
} }
log::debug!("Core action '{}' dispatched to frontend", action); log::debug!("Core action '{action}' dispatched to frontend");
} }
pub fn force_update_tray_menu(app_handle: &AppHandle, service_running: bool) -> tauri::Result<()> { pub fn force_update_tray_menu(app_handle: &AppHandle, service_running: bool) -> tauri::Result<()> {
@@ -288,10 +286,7 @@ pub fn force_update_tray_menu(app_handle: &AppHandle, service_running: bool) ->
*last_update = Some(Instant::now()); *last_update = Some(Instant::now());
} }
-        log::debug!(
-            "Tray menu force updated with service_running: {}",
-            service_running
-        );
+        log::debug!("Tray menu force updated with service_running: {service_running}");
} }
Ok(()) Ok(())
} }


@@ -1,9 +1,6 @@
use crate::object::structs::AppState;
use std::{collections::HashMap, env}; use std::{collections::HashMap, env};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tauri::State;
#[derive(Debug, Deserialize, Serialize, Clone)] #[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ProcessConfig { pub struct ProcessConfig {
@@ -56,21 +53,11 @@ pub fn get_server_port() -> u16 {
env::var("PROCESS_MANAGER_PORT") env::var("PROCESS_MANAGER_PORT")
.ok() .ok()
.and_then(|port_str| port_str.parse().ok()) .and_then(|port_str| port_str.parse().ok())
.unwrap_or_else(|| DEFAULT_HTTP_SERVER_PORT) .unwrap_or(DEFAULT_HTTP_SERVER_PORT)
} }
-pub fn get_api_key(state: State<'_, AppState>) -> String {
-    let app_settings = state
-        .app_settings
-        .read()
-        .clone()
-        .ok_or_else(|| "Failed to read app settings".to_string())
-        .unwrap();
-    let openlist_config = app_settings.openlist;
-    if openlist_config.api_token != "" {
-        return openlist_config.api_token.clone();
-    }
-    let api_key =
-        env::var("PROCESS_MANAGER_API_KEY").unwrap_or_else(|_| DEFAULT_API_KEY.to_string());
-    api_key
-}
+pub fn get_api_key() -> String {
+    env::var("PROCESS_MANAGER_API_KEY")
+        .ok()
+        .unwrap_or_else(|| DEFAULT_API_KEY.to_string())
+}


@@ -5,15 +5,12 @@ pub static APP_ID: &str = "io.github.openlistteam.openlist.desktop";
fn get_app_dir() -> Result<PathBuf, String> { fn get_app_dir() -> Result<PathBuf, String> {
let app_dir = env::current_exe() let app_dir = env::current_exe()
.map_err(|e| format!("Failed to get current exe path: {}", e))? .map_err(|e| format!("Failed to get current exe path: {e}"))?
.parent() .parent()
.ok_or("Failed to get parent directory")? .ok_or("Failed to get parent directory")?
.to_path_buf(); .to_path_buf();
if !app_dir.exists() { if !app_dir.exists() {
-        return Err(format!(
-            "Application directory does not exist: {:?}",
-            app_dir
-        ));
+        return Err(format!("Application directory does not exist: {app_dir:?}"));
} }
Ok(app_dir) Ok(app_dir)
@@ -31,8 +28,7 @@ pub fn get_openlist_binary_path() -> Result<PathBuf, String> {
if !binary_path.exists() { if !binary_path.exists() {
-        return Err(format!(
-            "OpenList service binary not found at: {:?}",
-            binary_path
-        ));
+        return Err(format!(
+            "OpenList service binary not found at: {binary_path:?}"
+        ));
} }
@@ -51,8 +47,7 @@ pub fn get_rclone_binary_path() -> Result<PathBuf, String> {
if !binary_path.exists() { if !binary_path.exists() {
-        return Err(format!(
-            "Rclone service binary not found at: {:?}",
-            binary_path
-        ));
+        return Err(format!(
+            "Rclone service binary not found at: {binary_path:?}"
+        ));
} }


@@ -148,6 +148,10 @@ export class TauriAPI {
return await invoke('save_settings', { settings }) return await invoke('save_settings', { settings })
} }
static async saveSettingsWithUpdatePort(settings: MergedSettings): Promise<boolean> {
return await invoke('save_settings_with_update_port', { settings })
}
static async resetSettings(): Promise<MergedSettings | null> { static async resetSettings(): Promise<MergedSettings | null> {
return await invoke('reset_settings') return await invoke('reset_settings')
} }


@@ -157,8 +157,8 @@ const gridColor = computed(() => {
return document.documentElement.classList.contains('dark') ? '#374151' : '#e5e7eb' return document.documentElement.classList.contains('dark') ? '#374151' : '#e5e7eb'
}) })
const checkServiceHealth = async () => { const checkCoreHealth = async () => {
await store.refreshServiceStatus() await store.refreshOpenListCoreStatus()
if (!isCoreRunning.value) { if (!isCoreRunning.value) {
dataPoints.value.push({ dataPoints.value.push({
timestamp: Date.now(), timestamp: Date.now(),
@@ -172,7 +172,7 @@ const checkServiceHealth = async () => {
const startTime = Date.now() const startTime = Date.now()
try { try {
await store.refreshServiceStatus() await store.refreshOpenListCoreStatus()
const endTime = Date.now() const endTime = Date.now()
const responseTimeMs = endTime - startTime const responseTimeMs = endTime - startTime
@@ -228,7 +228,7 @@ onMounted(async () => {
startTime.value = Date.now() startTime.value = Date.now()
} }
monitoringInterval.value = window.setInterval(checkServiceHealth, (store.settings.app.monitor_interval || 5) * 1000) monitoringInterval.value = window.setInterval(checkCoreHealth, (store.settings.app.monitor_interval || 5) * 1000)
window.addEventListener('resize', updateChartSize) window.addEventListener('resize', updateChartSize)
}) })


@@ -20,7 +20,12 @@
<span>{{ t('dashboard.quickActions.restart') }}</span> <span>{{ t('dashboard.quickActions.restart') }}</span>
</button> </button>
-        <button @click="openWebUI" :disabled="!isCoreRunning" class="action-btn web-btn">
+        <button
+          @click="openWebUI"
+          :disabled="!isCoreRunning"
+          class="action-btn web-btn"
+          :title="store.openListCoreUrl"
+        >
<ExternalLink :size="18" /> <ExternalLink :size="18" />
<span>{{ t('dashboard.quickActions.openWeb') }}</span> <span>{{ t('dashboard.quickActions.openWeb') }}</span>
</button> </button>


@@ -14,7 +14,7 @@ export const useCoreActions = () => {
const stopOpenListCore = async () => { const stopOpenListCore = async () => {
try { try {
await store.startOpenListCore() await store.stopOpenListCore()
} catch (error) { } catch (error) {
console.error('Failed to stop service:', error) console.error('Failed to stop service:', error)
throw error throw error


@@ -18,8 +18,6 @@ export const useTray = () => {
} }
const handleTrayServiceAction = async (action: string) => { const handleTrayServiceAction = async (action: string) => {
console.log('Tray core action:', action)
try { try {
switch (action) { switch (action) {
case 'start': case 'start':


@@ -304,7 +304,7 @@ export const useAppStore = defineStore('app', () => {
const isCoreRunning = computed(() => openlistCoreStatus.value.running) const isCoreRunning = computed(() => openlistCoreStatus.value.running)
const openListCoreUrl = computed(() => { const openListCoreUrl = computed(() => {
const protocol = settings.value.openlist.ssl_enabled ? 'https' : 'http' const protocol = settings.value.openlist.ssl_enabled ? 'https' : 'http'
return `${protocol}://localhost:${openlistCoreStatus.value.port}` return `${protocol}://localhost:${settings.value.openlist.port}`
}) })
async function loadSettings() { async function loadSettings() {
@@ -333,6 +333,17 @@ export const useAppStore = defineStore('app', () => {
} }
} }
async function saveSettingsWithUpdatePort(): Promise<boolean> {
try {
await TauriAPI.saveSettingsWithUpdatePort(settings.value)
return true
} catch (err) {
error.value = 'Failed to save settings'
console.error('Failed to save settings:', err)
return false
}
}
async function resetSettings() { async function resetSettings() {
try { try {
loading.value = true loading.value = true
@@ -392,7 +403,7 @@ export const useAppStore = defineStore('app', () => {
} }
openlistProcessId.value = processId openlistProcessId.value = processId
await refreshServiceStatus() await refreshOpenListCoreStatus()
await TauriAPI.updateTrayMenu(openlistCoreStatus.value.running) await TauriAPI.updateTrayMenu(openlistCoreStatus.value.running)
} catch (err: any) { } catch (err: any) {
@@ -446,7 +457,7 @@ export const useAppStore = defineStore('app', () => {
error.value = errorMessage error.value = errorMessage
console.error('Failed to stop service:', err) console.error('Failed to stop service:', err)
try { try {
await refreshServiceStatus() await refreshOpenListCoreStatus()
} catch (refreshErr) { } catch (refreshErr) {
console.error('Failed to refresh service status after stop failure:', refreshErr) console.error('Failed to refresh service status after stop failure:', refreshErr)
} }
@@ -485,19 +496,19 @@ export const useAppStore = defineStore('app', () => {
} }
const result = await TauriAPI.restartProcess(id) const result = await TauriAPI.restartProcess(id)
if (!result) { if (!result) {
throw new Error('Failed to restart OpenList Core service - service returned false') throw new Error('Failed to restart OpenList Core - service returned false')
} }
await refreshServiceStatus() await refreshOpenListCoreStatus()
await TauriAPI.updateTrayMenu(openlistCoreStatus.value.running) await TauriAPI.updateTrayMenu(openlistCoreStatus.value.running)
} catch (err: any) { } catch (err: any) {
const errorMessage = `Failed to restart service: ${formatError(err)}` const errorMessage = `Failed to restart core: ${formatError(err)}`
error.value = errorMessage error.value = errorMessage
console.error('Failed to restart service:', err) console.error('Failed to restart core:', err)
try { try {
await refreshServiceStatus() await refreshOpenListCoreStatus()
await safeUpdateTrayMenu(openlistCoreStatus.value.running) await safeUpdateTrayMenu(openlistCoreStatus.value.running)
} catch (refreshErr) { } catch (refreshErr) {
console.error('Failed to refresh service status after restart failure:', refreshErr) console.error('Failed to refresh core status after restart failure:', refreshErr)
} }
throw err throw err
} finally { } finally {
@@ -505,7 +516,7 @@ export const useAppStore = defineStore('app', () => {
} }
} }
async function refreshServiceStatus() { async function refreshOpenListCoreStatus() {
try { try {
const status = await TauriAPI.getOpenListCoreStatus() const status = await TauriAPI.getOpenListCoreStatus()
const statusChanged = openlistCoreStatus.value.running !== status.running const statusChanged = openlistCoreStatus.value.running !== status.running
@@ -605,13 +616,13 @@ export const useAppStore = defineStore('app', () => {
} }
} }
async function autoStartServiceIfEnabled() { async function autoStartCoreIfEnabled() {
try { try {
if (settings.value.openlist.auto_launch) { if (settings.value.openlist.auto_launch) {
await startOpenListCore() await startOpenListCore()
} }
} catch (err) { } catch (err) {
console.warn('Failed to auto-start service:', err) console.warn('Failed to auto-start core:', err)
} }
} }
@@ -654,9 +665,10 @@ export const useAppStore = defineStore('app', () => {
try { try {
initTutorial() initTutorial()
await loadSettings() await loadSettings()
await refreshServiceStatus() await refreshOpenListCoreStatus()
await TauriAPI.updateTrayMenuDelayed(openlistCoreStatus.value.running)
await loadLogs() await loadLogs()
await autoStartServiceIfEnabled() await autoStartCoreIfEnabled()
await loadRemoteConfigs() await loadRemoteConfigs()
await loadMountInfos() await loadMountInfos()
} catch (err) { } catch (err) {
@@ -761,13 +773,14 @@ export const useAppStore = defineStore('app', () => {
loadSettings, loadSettings,
saveSettings, saveSettings,
saveSettingsWithUpdatePort,
resetSettings, resetSettings,
startOpenListCore, startOpenListCore,
stopOpenListCore, stopOpenListCore,
restartOpenListCore, restartOpenListCore,
enableAutoLaunch, enableAutoLaunch,
refreshServiceStatus, refreshOpenListCoreStatus,
loadLogs, loadLogs,
clearLogs, clearLogs,
listFiles, listFiles,


@@ -19,10 +19,7 @@ const autoStartApp = ref(false)
const openlistCoreSettings = reactive({ ...store.settings.openlist }) const openlistCoreSettings = reactive({ ...store.settings.openlist })
const rcloneSettings = reactive({ ...store.settings.rclone }) const rcloneSettings = reactive({ ...store.settings.rclone })
const appSettings = reactive({ ...store.settings.app }) const appSettings = reactive({ ...store.settings.app })
-const isOpenListPortChanged = computed(() => {
-  return openlistCoreSettings.port !== store.settings.openlist.port
-})
+let originalOpenlistPort = openlistCoreSettings.port || 5244
watch(autoStartApp, async newValue => { watch(autoStartApp, async newValue => {
if (newValue) { if (newValue) {
@@ -72,6 +69,7 @@ onMounted(async () => {
if (!appSettings.monitor_interval) appSettings.monitor_interval = 5 if (!appSettings.monitor_interval) appSettings.monitor_interval = 5
if (appSettings.auto_update_enabled === undefined) appSettings.auto_update_enabled = true if (appSettings.auto_update_enabled === undefined) appSettings.auto_update_enabled = true
originalOpenlistPort = openlistCoreSettings.port || 5244
}) })
const hasUnsavedChanges = computed(() => { const hasUnsavedChanges = computed(() => {
@@ -108,8 +106,11 @@ const handleSave = async () => {
store.settings.openlist = { ...openlistCoreSettings } store.settings.openlist = { ...openlistCoreSettings }
store.settings.rclone = { ...rcloneSettings } store.settings.rclone = { ...rcloneSettings }
store.settings.app = { ...appSettings } store.settings.app = { ...appSettings }
-      await store.saveSettings()
+      if (originalOpenlistPort !== openlistCoreSettings.port) {
+        await store.saveSettingsWithUpdatePort()
+      } else {
+        await store.saveSettings()
+      }
message.value = t('settings.saved') message.value = t('settings.saved')
messageType.value = 'success' messageType.value = 'success'
} catch (error) { } catch (error) {

yarn.lock

@@ -1353,6 +1353,13 @@ ansi-escapes@^4.2.1, ansi-escapes@^4.3.2:
dependencies: dependencies:
type-fest "^0.21.3" type-fest "^0.21.3"
ansi-escapes@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-7.0.0.tgz#00fc19f491bbb18e1d481b97868204f92109bfe7"
integrity sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==
dependencies:
environment "^1.0.0"
ansi-regex@^3.0.0: ansi-regex@^3.0.0:
version "3.0.1" version "3.0.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1"
@@ -1387,6 +1394,11 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0:
dependencies: dependencies:
color-convert "^2.0.1" color-convert "^2.0.1"
ansi-styles@^6.0.0, ansi-styles@^6.2.1:
version "6.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5"
integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==
argparse@^2.0.1: argparse@^2.0.1:
version "2.0.1" version "2.0.1"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
@@ -1563,6 +1575,14 @@ cli-spinners@^2.5.0, cli-spinners@^2.9.2:
resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41"
integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==
cli-truncate@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-4.0.0.tgz#6cc28a2924fee9e25ce91e973db56c7066e6172a"
integrity sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==
dependencies:
slice-ansi "^5.0.0"
string-width "^7.0.0"
cli-width@^2.0.0: cli-width@^2.0.0:
version "2.2.1" version "2.2.1"
resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48"
@@ -1616,6 +1636,16 @@ color-name@~1.1.4:
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
colorette@^2.0.20:
version "2.0.20"
resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a"
integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==
commander@^14.0.0:
version "14.0.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-14.0.0.tgz#f244fc74a92343514e56229f16ef5c5e22ced5e9"
integrity sha512-2uM9rYjPvyq39NwLRqaiLtWHyDC1FvryJDa2ATTVims5YAS4PupsEQsDvP14FqhFr0P49CYDugi59xaxJlTXRA==
commitizen@^4.0.3, commitizen@^4.3.1: commitizen@^4.0.3, commitizen@^4.3.1:
version "4.3.1" version "4.3.1"
resolved "https://registry.yarnpkg.com/commitizen/-/commitizen-4.3.1.tgz#f0e0e4b7ae3fafc92e444bbb78f2ded5a1d4311a" resolved "https://registry.yarnpkg.com/commitizen/-/commitizen-4.3.1.tgz#f0e0e4b7ae3fafc92e444bbb78f2ded5a1d4311a"
@@ -1743,7 +1773,14 @@ cosmiconfig@^9.0.0:
js-yaml "^4.1.0" js-yaml "^4.1.0"
parse-json "^5.2.0" parse-json "^5.2.0"
cross-spawn@^7.0.6: cross-env@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf"
integrity sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==
dependencies:
cross-spawn "^7.0.1"
cross-spawn@^7.0.1, cross-spawn@^7.0.6:
version "7.0.6" version "7.0.6"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
@@ -1803,7 +1840,7 @@ de-indent@^1.0.2:
resolved "https://registry.yarnpkg.com/de-indent/-/de-indent-1.0.2.tgz#b2038e846dc33baa5796128d0804b455b8c1e21d" resolved "https://registry.yarnpkg.com/de-indent/-/de-indent-1.0.2.tgz#b2038e846dc33baa5796128d0804b455b8c1e21d"
integrity sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg== integrity sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg==
debug@4, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: debug@4, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4, debug@^4.4.1:
version "4.4.1" version "4.4.1"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b" resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b"
integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ== integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==
@@ -1887,6 +1924,11 @@ env-paths@^2.2.1:
resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2" resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2"
integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A== integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==
environment@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/environment/-/environment-1.1.0.tgz#8e86c66b180f363c7ab311787e0259665f45a9f1"
integrity sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==
error-ex@^1.3.1: error-ex@^1.3.1:
version "1.3.2" version "1.3.2"
resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
@@ -2097,6 +2139,11 @@ esutils@^2.0.2:
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
eventemitter3@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4"
integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==
expand-tilde@^2.0.0, expand-tilde@^2.0.2: expand-tilde@^2.0.0, expand-tilde@^2.0.2:
version "2.0.2" version "2.0.2"
resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502"
@@ -2611,6 +2658,18 @@ is-fullwidth-code-point@^3.0.0:
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
is-fullwidth-code-point@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz#fae3167c729e7463f8461ce512b080a49268aa88"
integrity sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==
is-fullwidth-code-point@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz#9609efced7c2f97da7b60145ef481c787c7ba704"
integrity sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==
dependencies:
get-east-asian-width "^1.0.0"
is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3:
version "4.0.3" version "4.0.3"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
@@ -2761,11 +2820,44 @@ levn@^0.4.1:
prelude-ls "^1.2.1" prelude-ls "^1.2.1"
type-check "~0.4.0" type-check "~0.4.0"
lilconfig@^3.1.3:
version "3.1.3"
resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.3.tgz#a1bcfd6257f9585bf5ae14ceeebb7b559025e4c4"
integrity sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==
lines-and-columns@^1.1.6: lines-and-columns@^1.1.6:
version "1.2.4" version "1.2.4"
resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"
integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
lint-staged@^16.1.2:
version "16.1.2"
resolved "https://registry.yarnpkg.com/lint-staged/-/lint-staged-16.1.2.tgz#8cb84daa844f39c7a9790dd2c0caa327125ef059"
integrity sha512-sQKw2Si2g9KUZNY3XNvRuDq4UJqpHwF0/FQzZR2M7I5MvtpWvibikCjUVJzZdGE0ByurEl3KQNvsGetd1ty1/Q==
dependencies:
chalk "^5.4.1"
commander "^14.0.0"
debug "^4.4.1"
lilconfig "^3.1.3"
listr2 "^8.3.3"
micromatch "^4.0.8"
nano-spawn "^1.0.2"
pidtree "^0.6.0"
string-argv "^0.3.2"
yaml "^2.8.0"
listr2@^8.3.3:
version "8.3.3"
resolved "https://registry.yarnpkg.com/listr2/-/listr2-8.3.3.tgz#815fc8f738260ff220981bf9e866b3e11e8121bf"
integrity sha512-LWzX2KsqcB1wqQ4AHgYb4RsDXauQiqhjLk+6hjbaeHG4zpjjVAB6wC/gz6X0l+Du1cN3pUB5ZlrvTbhGSNnUQQ==
dependencies:
cli-truncate "^4.0.0"
colorette "^2.0.20"
eventemitter3 "^5.0.1"
log-update "^6.1.0"
rfdc "^1.4.1"
wrap-ansi "^9.0.0"
locate-path@^6.0.0: locate-path@^6.0.0:
version "6.0.0" version "6.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
@@ -2851,6 +2943,17 @@ log-symbols@^6.0.0:
chalk "^5.3.0" chalk "^5.3.0"
is-unicode-supported "^1.3.0" is-unicode-supported "^1.3.0"
log-update@^6.1.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/log-update/-/log-update-6.1.0.tgz#1a04ff38166f94647ae1af562f4bd6a15b1b7cd4"
integrity sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==
dependencies:
ansi-escapes "^7.0.0"
cli-cursor "^5.0.0"
slice-ansi "^7.1.0"
strip-ansi "^7.1.0"
wrap-ansi "^9.0.0"
longest@^2.0.1:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/longest/-/longest-2.0.1.tgz#781e183296aa94f6d4d916dc335d0d17aefa23f8"
@@ -2999,6 +3102,11 @@ mute-stream@^2.0.0:
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-2.0.0.tgz#a5446fc0c512b71c83c44d908d5c7b7b4c493b2b" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-2.0.0.tgz#a5446fc0c512b71c83c44d908d5c7b7b4c493b2b"
integrity sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA== integrity sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==
nano-spawn@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/nano-spawn/-/nano-spawn-1.0.2.tgz#9853795681f0e96ef6f39104c2e4347b6ba79bf6"
integrity sha512-21t+ozMQDAL/UGgQVBbZ/xXvNO10++ZPuTmKRO8k9V3AClVRht49ahtDjfY8l1q6nSHOrE5ASfthzH3ol6R/hg==
nanoid@^3.3.11:
  version "3.3.11"
  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.11.tgz#4f4f112cefbe303202f2199838128936266d185b"
@@ -3235,6 +3343,11 @@ picomatch@^4.0.2:
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab"
integrity sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg== integrity sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==
pidtree@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/pidtree/-/pidtree-0.6.0.tgz#90ad7b6d42d5841e69e0a2419ef38f8883aa057c"
integrity sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==
pinia@^3.0.3:
  version "3.0.3"
  resolved "https://registry.yarnpkg.com/pinia/-/pinia-3.0.3.tgz#f412019bdeb2f45e85927b432803190343e12d89"
@@ -3463,6 +3576,22 @@ signal-exit@^4.1.0:
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04"
integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==
slice-ansi@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-5.0.0.tgz#b73063c57aa96f9cd881654b15294d95d285c42a"
integrity sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==
dependencies:
ansi-styles "^6.0.0"
is-fullwidth-code-point "^4.0.0"
slice-ansi@^7.1.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-7.1.0.tgz#cd6b4655e298a8d1bdeb04250a433094b347b9a9"
integrity sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==
dependencies:
ansi-styles "^6.2.1"
is-fullwidth-code-point "^5.0.0"
source-map-js@^1.0.2, source-map-js@^1.2.1:
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46"
@@ -3514,6 +3643,11 @@ stdin-discarder@^0.2.2:
resolved "https://registry.yarnpkg.com/stdin-discarder/-/stdin-discarder-0.2.2.tgz#390037f44c4ae1a1ae535c5fe38dc3aba8d997be" resolved "https://registry.yarnpkg.com/stdin-discarder/-/stdin-discarder-0.2.2.tgz#390037f44c4ae1a1ae535c5fe38dc3aba8d997be"
integrity sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ== integrity sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==
string-argv@^0.3.2:
version "0.3.2"
resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.2.tgz#2b6d0ef24b656274d957d54e0a4bbf6153dc02b6"
integrity sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==
string-width@^2.1.0:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
@@ -3531,7 +3665,7 @@ string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
is-fullwidth-code-point "^3.0.0" is-fullwidth-code-point "^3.0.0"
strip-ansi "^6.0.1" strip-ansi "^6.0.1"
string-width@^7.2.0: string-width@^7.0.0, string-width@^7.2.0:
version "7.2.0" version "7.2.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-7.2.0.tgz#b5bb8e2165ce275d4d43476dd2700ad9091db6dc" resolved "https://registry.yarnpkg.com/string-width/-/string-width-7.2.0.tgz#b5bb8e2165ce275d4d43476dd2700ad9091db6dc"
integrity sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ== integrity sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==
@@ -3896,6 +4030,15 @@ wrap-ansi@^7.0.0:
string-width "^4.1.0" string-width "^4.1.0"
strip-ansi "^6.0.0" strip-ansi "^6.0.0"
wrap-ansi@^9.0.0:
version "9.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-9.0.0.tgz#1a3dc8b70d85eeb8398ddfb1e4a02cd186e58b3e"
integrity sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==
dependencies:
ansi-styles "^6.2.1"
string-width "^7.0.0"
strip-ansi "^7.1.0"
wrappy@1:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
@@ -3911,6 +4054,11 @@ yallist@^5.0.0:
resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533" resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533"
integrity sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw== integrity sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==
yaml@^2.8.0:
version "2.8.0"
resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.8.0.tgz#15f8c9866211bdc2d3781a0890e44d4fa1a5fff6"
integrity sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==
yargs-parser@^21.1.1:
  version "21.1.1"
  resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35"